Example #1
        public void CVImageBufferTransferFunctionTest()
        {
            TestRuntime.AssertXcodeVersion(9, 0);

            var codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferTransferFunction.ItuR2100Hlg);
            var matrixOption = CVImageBuffer.GetTransferFunctionOption(codepoint);

            Assert.AreEqual(CVImageBufferTransferFunction.ItuR2100Hlg, matrixOption, "ItuR2100Hlg");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferTransferFunction.ItuR709_2);
            matrixOption = CVImageBuffer.GetTransferFunctionOption(codepoint);
            Assert.AreEqual(CVImageBufferTransferFunction.ItuR709_2, matrixOption, "ItuR709_2");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferTransferFunction.Smpte240M1995);
            matrixOption = CVImageBuffer.GetTransferFunctionOption(codepoint);
            Assert.AreEqual(CVImageBufferTransferFunction.Smpte240M1995, matrixOption, "Smpte240M1995");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferTransferFunction.SmpteST2084PQ);
            matrixOption = CVImageBuffer.GetTransferFunctionOption(codepoint);
            Assert.AreEqual(CVImageBufferTransferFunction.SmpteST2084PQ, matrixOption, "SmpteST2084PQ");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferTransferFunction.SmpteST428_1);
            matrixOption = CVImageBuffer.GetTransferFunctionOption(codepoint);
            Assert.AreEqual(CVImageBufferTransferFunction.SmpteST428_1, matrixOption, "SmpteST428_1");
        }
Example #2
        public static CMSampleBuffer CreateForImageBuffer(CVImageBuffer imageBuffer, bool dataReady, CMVideoFormatDescription formatDescription, CMSampleTimingInfo sampleTiming, out CMSampleBufferError error)
        {
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }
            if (formatDescription == null)
            {
                throw new ArgumentNullException("formatDescription");
            }

            IntPtr buffer;

            error = CMSampleBufferCreateForImageBuffer(IntPtr.Zero,
                                                       imageBuffer.handle, dataReady,
                                                       IntPtr.Zero, IntPtr.Zero,
                                                       formatDescription.handle,
                                                       ref sampleTiming,
                                                       out buffer);

            if (error != CMSampleBufferError.None)
            {
                return(null);
            }

            return(new CMSampleBuffer(buffer, true));
        }
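
A minimal call-site sketch for the factory above, assuming `pixelBuffer` is an existing CVPixelBuffer (e.g. from a capture callback) and `pts` its presentation time; both names are illustrative, not part of the sample:

        CMFormatDescriptionError fmtError;
        using (var formatDescription = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fmtError)) {
            var timing = new CMSampleTimingInfo {
                Duration              = CMTime.Invalid,
                PresentationTimeStamp = pts,
                DecodeTimeStamp       = CMTime.Invalid
            };

            CMSampleBufferError sampleError;
            using (var sampleBuffer = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, formatDescription, timing, out sampleError)) {
                if (sampleError == CMSampleBufferError.None && sampleBuffer != null) {
                    // hand the ready sample buffer to a writer or display layer here
                }
            }
        }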
Example #3
        public VTStatus EncodeFrame(CVImageBuffer imageBuffer, CMTime presentationTimestamp, CMTime duration,
                                    NSDictionary frameProperties, IntPtr sourceFrame, out VTEncodeInfoFlags infoFlags,
                                    VTCompressionOutputHandler outputHandler)
        {
            if (Handle == IntPtr.Zero)
            {
                throw new ObjectDisposedException("CompressionSession");
            }
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }
            if (outputHandler == null)
            {
                throw new ArgumentNullException("outputHandler");
            }

            unsafe {
                var block    = new BlockLiteral();
                var blockPtr = &block;
                block.SetupBlockUnsafe(compressionOutputHandlerTrampoline, outputHandler);

                try {
                    return(VTCompressionSessionEncodeFrameWithOutputHandler(Handle,
                                                                            imageBuffer.Handle, presentationTimestamp, duration,
                                                                            frameProperties == null ? IntPtr.Zero : frameProperties.Handle,
                                                                            out infoFlags, blockPtr));
                } finally {
                    blockPtr->CleanupBlock();
                }
            }
        }
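
A hedged call-site sketch for this handler-based overload; `session`, `pixelBuffer`, and `pts` are illustrative names, and it assumes VTCompressionOutputHandler delivers (status, infoFlags, sampleBuffer):

        VTEncodeInfoFlags infoFlags;
        var status = session.EncodeFrame(pixelBuffer, pts, CMTime.Invalid,
                                         null /* frameProperties */, IntPtr.Zero /* sourceFrame */, out infoFlags,
                                         (encodeStatus, encodeFlags, sampleBuffer) => {
            if (encodeStatus == VTStatus.Ok && sampleBuffer != null) {
                // consume the compressed sample buffer, e.g. append it to an asset writer
            }
        });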
Example #4
        public void CVImageBufferColorPrimariesTest()
        {
            TestRuntime.AssertXcodeVersion(9, 0);

            var codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferColorPrimaries.ItuR2020);
            var matrixOption = CVImageBuffer.GetColorPrimariesOption(codepoint);

            Assert.AreEqual(CVImageBufferColorPrimaries.ItuR2020, matrixOption, "ItuR2020");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferColorPrimaries.Ebu3213);
            matrixOption = CVImageBuffer.GetColorPrimariesOption(codepoint);
            Assert.AreEqual(CVImageBufferColorPrimaries.Ebu3213, matrixOption, "Ebu3213");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferColorPrimaries.ItuR709_2);
            matrixOption = CVImageBuffer.GetColorPrimariesOption(codepoint);
            Assert.AreEqual(CVImageBufferColorPrimaries.ItuR709_2, matrixOption, "ItuR709_2");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferColorPrimaries.P22);
            matrixOption = CVImageBuffer.GetColorPrimariesOption(codepoint);
            Assert.AreEqual(CVImageBufferColorPrimaries.P22, matrixOption, "P22");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferColorPrimaries.SmpteC);
            matrixOption = CVImageBuffer.GetColorPrimariesOption(codepoint);
            Assert.AreEqual(CVImageBufferColorPrimaries.SmpteC, matrixOption, "SmpteC");
        }
Example #5
        public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection avConnection)
        {
            CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer();
            var           pixelBuffer = imageBuffer as CVPixelBuffer;

            var bufferSize = pixelBuffer.Height * pixelBuffer.BytesPerRow;

            if (bytes.Length != bufferSize)
            {
                bytes = new byte[bufferSize];
            }

            pixelBuffer.Lock(CVPixelBufferLock.None);
            Marshal.Copy(pixelBuffer.BaseAddress, bytes, 0, bytes.Length);
            pixelBuffer.Unlock(CVPixelBufferLock.None);

            var image = SixLabors.ImageSharp.Image
                        .LoadPixelData <SixLabors.ImageSharp.PixelFormats.Rgb24>(
                SixLabors.ImageSharp.Configuration.Default,
                bytes, (int)pixelBuffer.Width, (int)pixelBuffer.Height);

            string asciiImage = ImageConverter.ImageToAsciiArt(image);

            connection.InvokeAsync("SendFrame", asciiImage);
        }
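
The callback above relies on state the snippet does not show. A sketch of the assumed members; treating `connection` as a SignalR HubConnection is an inference from the InvokeAsync call, and the capture output must be configured to emit frames matching the Rgb24 layout passed to LoadPixelData:

        byte[] bytes = Array.Empty<byte>();                            // reusable frame buffer, resized on format change
        Microsoft.AspNetCore.SignalR.Client.HubConnection connection; // pushes the ASCII frames to clients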
Example #6
            public unsafe void ProcessPixelBuffer(CVImageBuffer imageBuffer)
            {
                using (CVPixelBuffer pixelBuffer = imageBuffer as CVPixelBuffer)
                {
                    pixelBuffer.Lock(CVOptionFlags.None);

                    int   bufferWidth  = pixelBuffer.Width;
                    int   bufferHeight = pixelBuffer.Height;
                    byte *pixelPtr     = (byte *)pixelBuffer.BaseAddress.ToPointer();

                    int position = 0;
                    for (var row = 0; row < bufferHeight; row++)
                    {
                        for (var column = 0; column < bufferWidth; column++)
                        {
                            // De-green (Second pixel in BGRA is green)
                            *(pixelPtr + 1) = 0;
                            pixelPtr       += BYTES_PER_PIXEL;
                            position       += BYTES_PER_PIXEL;                       // For each pixel increase the offset by the number of bytes per pixel
                        }
                    }

                    pixelBuffer.Unlock(CVOptionFlags.None);
                }
            }
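
The loop relies on a BYTES_PER_PIXEL constant defined elsewhere in the class; for the 32-bit BGRA frames it expects, each pixel occupies four bytes:

            const int BYTES_PER_PIXEL = 4; // B, G, R, A: one byte each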
Example #7
        public static CMSampleBuffer? CreateForImageBuffer(CVImageBuffer imageBuffer, bool dataReady, CMVideoFormatDescription formatDescription, CMSampleTimingInfo sampleTiming, out CMSampleBufferError error)
        {
            if (imageBuffer is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(imageBuffer));
            }
            if (formatDescription is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(formatDescription));
            }

            IntPtr buffer;

            error = CMSampleBufferCreateForImageBuffer(IntPtr.Zero,
                                                       imageBuffer.Handle, dataReady,
                                                       IntPtr.Zero, IntPtr.Zero,
                                                       formatDescription.Handle,
                                                       ref sampleTiming,
                                                       out buffer);

            if (error != CMSampleBufferError.None)
            {
                return(null);
            }

            return(new CMSampleBuffer(buffer, true));
        }
Example #8
 public bool VideoMatchesImageBuffer(CVImageBuffer imageBuffer)
 {
     if (imageBuffer == null)
     {
         throw new ArgumentNullException("imageBuffer");
     }
     return(CMVideoFormatDescriptionMatchesImageBuffer(handle, imageBuffer.Handle));
 }
Example #9
 public bool VideoMatchesImageBuffer(CVImageBuffer imageBuffer)
 {
     if (imageBuffer is null)
     {
         ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(imageBuffer));
     }
     return(CMVideoFormatDescriptionMatchesImageBuffer(Handle, imageBuffer.Handle));
 }
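
A hedged usage sketch for this check (names are illustrative): reuse a cached format description, built with the CreateForImageBuffer factory shown in Example #17, only while incoming frames still match it.

 CMVideoFormatDescription cachedDescription; // built from an earlier frame

 void OnFrame(CVImageBuffer imageBuffer)
 {
     if (cachedDescription == null || !cachedDescription.VideoMatchesImageBuffer(imageBuffer))
     {
         CMFormatDescriptionError error;
         cachedDescription = CMVideoFormatDescription.CreateForImageBuffer(imageBuffer, out error);
     }
 }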
Example #10
 public void OnPixelBufferReadyForDisplay(CVImageBuffer imageBuffer)
 {
     // Don't make OpenGLES calls while in the background.
     if (UIApplication.SharedApplication.ApplicationState != UIApplicationState.Background)
     {
         oglView.DisplayPixelBuffer(imageBuffer);
     }
 }
Example #11
        public override void WindowControllerDidLoadNib(NSWindowController windowController)
        {
            NSError err;

            // Create a movie, and store the information in memory on an NSMutableData
            movie = new QTMovie(new NSMutableData(1), out err);
            if (movie == null)
            {
                NSAlert.WithError(err).RunModal();
                return;
            }

            movieView.Movie = movie;

            // Find video device
            captureSession = new QTCaptureSession();
            var device = QTCaptureDevice.GetDefaultInputDevice(QTMediaType.Video);

            if (device == null)
            {
                new NSAlert {
                    MessageText = "You do not have a camera connected."
                }.BeginSheet(windowController.Window);
                return;
            }
            else if (!device.Open(out err))
            {
                NSAlert.WithError(err).BeginSheet(windowController.Window);
                return;
            }

            // Add device input
            captureInput = new QTCaptureDeviceInput(device);
            if (!captureSession.AddInput(captureInput, out err))
            {
                NSAlert.WithError(err).BeginSheet(windowController.Window);
                return;
            }

            // Create decompressor for video output, to get raw frames
            decompressedVideo = new QTCaptureDecompressedVideoOutput();
            decompressedVideo.DidOutputVideoFrame += delegate(object sender, QTCaptureVideoFrameEventArgs e) {
                lock (this) {
                    currentImage = e.VideoFrame;
                }
            };
            if (!captureSession.AddOutput(decompressedVideo, out err))
            {
                NSAlert.WithError(err).BeginSheet(windowController.Window);
                return;
            }

            // Activate preview
            captureView.CaptureSession = captureSession;

            // Start running.
            captureSession.StartRunning();
        }
Example #12
 public static CMSampleBuffer CreateReadyWithImageBuffer(CVImageBuffer imageBuffer,
                                                         CMFormatDescription formatDescription, CMSampleTimingInfo[] sampleTiming, out CMSampleBufferError error)
 {
     if (sampleTiming == null)
     {
         throw new ArgumentNullException(nameof(sampleTiming));
     }
     if (sampleTiming.Length != 1)
     {
         throw new ArgumentException("Only a single sample is allowed.", nameof(sampleTiming));
     }
      // Only one timing entry is accepted, so forward it to the single-sample (ref) overload.
      return(CreateReadyWithImageBuffer(imageBuffer, formatDescription, ref sampleTiming[0], out error));
 }
Example #13
		public override void WindowControllerDidLoadNib (NSWindowController windowController)
		{
			NSError err;

			// Create a movie, and store the information in memory on an NSMutableData
			movie = new QTMovie (new NSMutableData (1), out err);
			if (movie == null) {
				NSAlert.WithError (err).RunModal ();
				return;
			}

			movieView.Movie = movie;

			// Find video device
			captureSession = new QTCaptureSession ();
			var device = QTCaptureDevice.GetDefaultInputDevice (QTMediaType.Video);
			if (device == null) {
				new NSAlert { MessageText = "You do not have a camera connected." }.BeginSheet (windowController.Window);
				return;
			} else if (!device.Open (out err)) {
				NSAlert.WithError (err).BeginSheet (windowController.Window);
				return;
			}

			// Add device input
			captureInput = new QTCaptureDeviceInput (device);
			if (!captureSession.AddInput (captureInput, out err)) {
				NSAlert.WithError (err).BeginSheet (windowController.Window);
				return;
			}

			// Create decompressor for video output, to get raw frames
			decompressedVideo = new QTCaptureDecompressedVideoOutput ();
			decompressedVideo.DidOutputVideoFrame += delegate(object sender, QTCaptureVideoFrameEventArgs e) {
				lock (this) {
					currentImage = e.VideoFrame;
				}
			};
			if (!captureSession.AddOutput (decompressedVideo, out err)) {
				NSAlert.WithError (err).BeginSheet (windowController.Window);
				return;
			}

			// Activate preview
			captureView.CaptureSession = captureSession;

			// Start running.
			captureSession.StartRunning ();
		}
Example #14
        public VTStatus EncodeFrame(CVImageBuffer imageBuffer, CMTime presentationTimestamp, CMTime duration,
                                    NSDictionary frameProperties, IntPtr sourceFrame, out VTEncodeInfoFlags infoFlags)
        {
            if (Handle == IntPtr.Zero)
            {
                throw new ObjectDisposedException("CompressionSession");
            }
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }

            return(VTCompressionSessionEncodeFrame(Handle, imageBuffer.Handle, presentationTimestamp, duration,
                                                   frameProperties == null ? IntPtr.Zero : frameProperties.Handle,
                                                   sourceFrame, out infoFlags));
        }
Example #15
        private CGImage CreateImage(CMSampleBuffer sampleBuffer)
        {
            CGImage image = null;

            CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription();
            var           subType     = formatDescription.MediaSubType;
            CMBlockBuffer blockBuffer = sampleBuffer.GetDataBuffer();

            if (blockBuffer != null)
            {
                if (subType != (int)CMVideoCodecType.JPEG)
                {
                    throw new Exception("Block buffer must be JPEG encoded.");
                }

                var jpegData = new NSMutableData();
                jpegData.Length = blockBuffer.DataLength;

                blockBuffer.CopyDataBytes(0, blockBuffer.DataLength, jpegData.Bytes);

                using (var imageSource = CGImageSource.FromData(jpegData)) {
                    var decodeOptions = new CGImageOptions {
                        ShouldAllowFloat = false,
                        ShouldCache      = false
                    };

                    image = imageSource.CreateImage(0, decodeOptions);
                }
            }
            else
            {
                if (subType != (int)CVPixelFormatType.CV32BGRA)
                {
                    throw new Exception("Image buffer must be BGRA encoded.");
                }

                var pixelBuffer = (CVPixelBuffer)sampleBuffer.GetImageBuffer();

                // CGBitmapContext needs the raw pixel data and its stride, so lock
                // the pixel buffer and pass its base address and bytes-per-row.
                pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
                using (var colorSpace = CGColorSpace.CreateDeviceRGB())
                    using (var bitmapContext = new CGBitmapContext(pixelBuffer.BaseAddress,
                                                                   (int)pixelBuffer.Width, (int)pixelBuffer.Height, 8, (int)pixelBuffer.BytesPerRow, colorSpace, CGImageAlphaInfo.NoneSkipFirst)) {
                        image = bitmapContext.ToImage();
                    }
                pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
            }

            return(image);
        }
Example #16
        protected override bool Append(CMSampleBuffer sampleBuffer)
        {
            CMTime presentationTime = sampleBuffer.PresentationTimeStamp;

            using (CVPixelBuffer writerBuffer = adaptor.PixelBufferPool.CreatePixelBuffer()) {
                // Grab the pixel buffer from the sample buffer, if possible
                using (CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer()) {
                    var pixelBuffer = imageBuffer as CVPixelBuffer;
                    if (pixelBuffer != null)
                    {
                        transformer.AdjustPixelBuffer(pixelBuffer, writerBuffer);
                    }
                }

                return(adaptor.AppendPixelBufferWithPresentationTime(writerBuffer, presentationTime));
            }
        }
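
Members assumed by Append above, sketched for context; only `adaptor`'s type follows from the API calls used, while `transformer` is a sample-specific helper that rewrites pixels from the source buffer into the writer-owned buffer:

        AVAssetWriterInputPixelBufferAdaptor adaptor; // supplies PixelBufferPool and AppendPixelBufferWithPresentationTime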
Example #17
        public static CMVideoFormatDescription? CreateForImageBuffer(CVImageBuffer imageBuffer, out CMFormatDescriptionError error)
        {
            if (imageBuffer is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(imageBuffer));
            }

            IntPtr desc;

            error = CMVideoFormatDescriptionCreateForImageBuffer(IntPtr.Zero, imageBuffer.Handle, out desc);
            if (error != CMFormatDescriptionError.None)
            {
                return(null);
            }

            return(new CMVideoFormatDescription(desc, true));
        }
Example #18
        public void DidReadSampleBuffer(ReadWriteSampleBufferChannel sampleBufferChannel, CMSampleBuffer sampleBuffer)
        {
            // Calculate progress (scale of 0.0 to 1.0)
            double progress = AVReaderWriter.ProgressOfSampleBufferInTimeRange(sampleBuffer, _timeRange);

            _progressProc((float)progress * 100);

            // Grab the pixel buffer from the sample buffer, if possible
            CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer();

            var pixelBuffer = imageBuffer as CVPixelBuffer;

            if (pixelBuffer != null)
            {
                Delegate.AdjustPixelBuffer(pixelBuffer, null);                  // TODO: problem in original sample. No method
            }
        }
Example #19
        public static CMVideoFormatDescription CreateForImageBuffer(CVImageBuffer imageBuffer, out CMFormatDescriptionError error)
        {
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }

            IntPtr desc;

            error = CMVideoFormatDescriptionCreateForImageBuffer(IntPtr.Zero, imageBuffer.handle, out desc);
            if (error != CMFormatDescriptionError.None)
            {
                return(null);
            }

            return(new CMVideoFormatDescription(desc, true));
        }
Example #20
        public static CMSampleBuffer CreateReadyWithImageBuffer(CVImageBuffer imageBuffer,
                                                                CMFormatDescription formatDescription, CMSampleTimingInfo[] sampleTiming, out CMSampleBufferError error)
        {
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }
            if (formatDescription == null)
            {
                throw new ArgumentNullException("formatDescription");
            }

            IntPtr buffer;

            error = CMSampleBufferCreateReadyWithImageBuffer(IntPtr.Zero, imageBuffer.handle,
                                                             formatDescription.Handle, sampleTiming, out buffer);

            if (error != CMSampleBufferError.None)
            {
                return(null);
            }

            return(new CMSampleBuffer(buffer, true));
        }
Example #21
        public static CMSampleBuffer? CreateReadyWithImageBuffer(CVImageBuffer imageBuffer,
                                                                CMFormatDescription formatDescription, ref CMSampleTimingInfo sampleTiming, out CMSampleBufferError error)
        {
            if (imageBuffer is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(imageBuffer));
            }
            if (formatDescription is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(formatDescription));
            }

            IntPtr buffer;

            error = CMSampleBufferCreateReadyWithImageBuffer(IntPtr.Zero, imageBuffer.Handle,
                                                             formatDescription.Handle, ref sampleTiming, out buffer);

            if (error != CMSampleBufferError.None)
            {
                return(null);
            }

            return(new CMSampleBuffer(buffer, true));
        }
Example #22
        public void CVImageBufferYCbCrMatrixTest()
        {
            TestRuntime.AssertXcodeVersion(9, 0);

            var codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferYCbCrMatrix.ItuR2020);
            var matrixOption = CVImageBuffer.GetYCbCrMatrixOption(codepoint);

            Assert.AreEqual(CVImageBufferYCbCrMatrix.ItuR2020, matrixOption, "ItuR2020");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferYCbCrMatrix.ItuR601_4);
            matrixOption = CVImageBuffer.GetYCbCrMatrixOption(codepoint);
            Assert.AreEqual(CVImageBufferYCbCrMatrix.ItuR601_4, matrixOption, "ItuR601_4");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferYCbCrMatrix.ItuR709_2);
            matrixOption = CVImageBuffer.GetYCbCrMatrixOption(codepoint);
            Assert.AreEqual(CVImageBufferYCbCrMatrix.ItuR709_2, matrixOption, "ItuR709_2");

            codepoint    = CVImageBuffer.GetCodePoint(CVImageBufferYCbCrMatrix.Smpte240M1995);
            matrixOption = CVImageBuffer.GetYCbCrMatrixOption(codepoint);
            Assert.AreEqual(CVImageBufferYCbCrMatrix.Smpte240M1995, matrixOption, "Smpte240M1995");
        }
Example #23
        public unsafe void ProcessPixelBuffer(CVImageBuffer imageBuffer)
        {
            using (var pixelBuffer = imageBuffer as CVPixelBuffer)
            {
                pixelBuffer.Lock(CVOptionFlags.None);

                int bufferWidth  = pixelBuffer.Width;
                int bufferHeight = pixelBuffer.Height;
                // offset by one to de-green the BGRA array (green is second)
                byte *pixelPtr = (byte *)pixelBuffer.BaseAddress.ToPointer() + 1;

                for (var row = 0; row < bufferHeight; row++)
                {
                    for (var column = 0; column < bufferWidth; column++)
                    {
                        *pixelPtr = 0;
                        pixelPtr += BYTES_PER_PIXEL;
                    }
                }

                pixelBuffer.Unlock(CVOptionFlags.None);
            }
        }
Example #24
        public CVOpenGLESTexture TextureFromImage(CVImageBuffer imageBuffer, bool isTexture2d, OpenTK.Graphics.ES20.All internalFormat, int width, int height, OpenTK.Graphics.ES20.All pixelFormat, OpenTK.Graphics.ES20.DataType pixelType, int planeIndex, out CVReturn errorCode)
        {
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }

            int    target = isTexture2d ? 0x0DE1 /* GL_TEXTURE_2D */ : 0x8D41 /* GL_RENDERBUFFER */;
            IntPtr texture;

            errorCode = CVOpenGLESTextureCacheCreateTextureFromImage(
                IntPtr.Zero,
                handle,                 /* textureCache dict, one day we might add it */
                imageBuffer.Handle,
                IntPtr.Zero,
                target,
                internalFormat, width, height, pixelFormat,
                pixelType, (IntPtr)planeIndex, out texture);
            if (errorCode != 0)
            {
                return(null);
            }
            return(new CVOpenGLESTexture(texture));
        }
Example #25
        public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            // HACK: Change CMSampleBuffer.GetFormatDescription() to CMSampleBuffer.GetVideoFormatDescription()
            // HACK Change CMFormatDescription to CMVideoFormatDescription
            // CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();
            CMVideoFormatDescription formatDescription = sampleBuffer.GetVideoFormatDescription();

            if (connection == videoConnection)
            {
                // Get framerate
                CMTime timestamp = sampleBuffer.PresentationTimeStamp;
                CalculateFramerateAtTimestamp(timestamp);

                // Get frame dimensions (for onscreen display)
                if (VideoDimensions.IsEmpty)
                {
                    // HACK: Change GetVideoPresentationDimensions() to GetPresentationDimensions()
                    // VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
                    VideoDimensions = formatDescription.GetPresentationDimensions(true, false);
                }

                // Get the buffer type
                if (VideoType == 0)
                {
                    VideoType = formatDescription.MediaSubType;
                }

                // Synchronously process the pixel buffer to de-green it.
                using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                    ProcessPixelBuffer(pixelBuffer);

                previewBufferQueue.Enqueue(sampleBuffer);

                //var writeBuffer = sampleBuffer.Duplicate ();
                InvokeOnMainThread(() => {
                    INativeObject j = previewBufferQueue.Dequeue();

                    var sbuf = j as CMSampleBuffer;
                    if (sbuf == null)
                    {
#if DEBUG
                        // Record the current sampleBuffer.ClassHandle
                        // Then run another iteration and on the next one, print the ClassHandle
                        Console.WriteLine("The type is {0}", j.ToString());
#endif
                        return;
                    }

                    using (CVImageBuffer pixBuf = sbuf.GetImageBuffer()) {
                        if (PixelBufferReadyForDisplay != null)
                        {
                            PixelBufferReadyForDisplay(pixBuf);
                        }
                    }
                });
            }
            // keep a reference to 'sampleBuffer', movieWritingQueue will remove it
            CompleteBufferUse(sampleBuffer);

            movieWritingQueue.DispatchAsync(() => {
                if (assetWriter != null)
                {
                    bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (connection == videoConnection)
                    {
                        // Initialize the video input if this is not done yet
                        if (!readyToRecordVideo)
                        {
                            readyToRecordVideo = SetupAssetWriterVideoInput(formatDescription);
                        }

                        // Write the video data to file
                        if (readyToRecordVideo && readyToRecordAudio)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Video);
                        }
                    }
                    else if (connection == audioConnection)
                    {
                        if (!readyToRecordAudio)
                        {
                            readyToRecordAudio = SetupAssetWriterAudioInput(formatDescription);
                        }

                        if (readyToRecordAudio && readyToRecordVideo)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Audio);
                        }
                    }
                    bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (!wasReadyToRecord && isReadyToRecord)
                    {
                        recordingWillBeStarted = false;
                        IsRecording            = true;

                        if (RecordingDidStart != null)
                        {
                            RecordingDidStart();
                        }
                    }
                }
                CompleteBufferUse(sampleBuffer);
            });
        }
Example #26
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription();

                if (connection == processor.videoConnection)
                {
                    // Get framerate
                    CMTime timestamp = sampleBuffer.PresentationTimeStamp;
                    CalculateFramerateAtTimestamp(timestamp);

                    // Get frame dimensions (for onscreen display)
                    if (processor.VideoDimensions.Width == 0 && processor.VideoDimensions.Height == 0)
                    {
                        processor.VideoDimensions = formatDescription.GetVideoPresentationDimensions(true, false);
                    }

                    // Get the buffer type
                    if (processor.VideoType == 0)
                    {
                        processor.VideoType = formatDescription.MediaSubType;
                    }
                    // TODO: processor.VideoType = (CMVideoCodecType)Enum.ToObject (typeof(CMVideoCodecType), formatDescription.MediaSubType);

                    // Synchronously process the pixel buffer to de-green it.
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                        ProcessPixelBuffer(pixelBuffer);

                    processor.previewBufferQueue.Enqueue(sampleBuffer);

                    //var writeBuffer = sampleBuffer.Duplicate ();
                    InvokeOnMainThread(() => {
                        var j = processor.previewBufferQueue.Dequeue();

                        var sbuf = j as CMSampleBuffer;
                        if (sbuf == null)
                        {
                            // Record the current sampleBuffer.ClassHandle
                            // Then run another iteration and on the next one, print the ClassHandle
                            Console.WriteLine("The type is {0}", new NSString(CFCopyDescription(j.Handle)));
                            return;
                        }

                        using (CVImageBuffer pixBuf = sbuf.GetImageBuffer()){
                            if (processor.PixelBufferReadyForDisplay != null)
                            {
                                processor.PixelBufferReadyForDisplay(pixBuf);
                            }
                        }

                        if (processor.assetWriter == null)
                        {
                            sbuf.Dispose();
                        }
                        else
                        {
                            processor.CompleteBufferUse(sbuf);
                        }
                    });
                }


                processor.movieWritingQueue.DispatchAsync(() => {
                    if (processor.assetWriter != null)
                    {
                        bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

                        // Initialize the video input if this is not done yet
                        if (!processor.readyToRecordVideo)
                        {
                            processor.readyToRecordVideo = SetupAssetWriterVideoInput(formatDescription);
                        }

                        // Write the video data to file
                        if (processor.readyToRecordVideo && processor.readyToRecordAudio)
                        {
                            processor.WriteSampleBuffer(sampleBuffer, AVMediaType.Video);
                        }

                        bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

                        if (!wasReadyToRecord && isReadyToRecord)
                        {
                            processor.recordingWillBeStarted = false;
                            processor.IsRecording            = true;

                            if (processor.RecordingDidStart != null)
                            {
                                processor.RecordingDidStart();
                            }
                        }

                        processor.CompleteBufferUse(sampleBuffer);
                    }
                });
            }
Example #27
        void DidDecompress(IntPtr sourceFrame, VTStatus status, VTDecodeInfoFlags flags, CVImageBuffer buffer, CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            if (status != VTStatus.Ok) {
                Console.WriteLine ("Error decompresssing frame at time: {0:#.###} error: {1} infoFlags: {2}",
                    (float)presentationTimeStamp.Value / presentationTimeStamp.TimeScale, (int)status, flags);
                return;
            }

            if (buffer == null)
                return;

            // Find the correct position for this frame in the output frames array
            if (presentationTimeStamp.IsInvalid) {
                Console.WriteLine ("Not a valid time for image buffer");
                return;
            }

            var framePTS = presentationTimeStamp.Seconds;

            lock (thisLock) {
                // since we want to keep the managed `pixelBuffer` alive outside the execution
                // of the callback we need to create our own (managed) instance from the handle
                var pixelBuffer = Runtime.GetINativeObject<CVPixelBuffer> (buffer.Handle, false);

                int insertionIndex = presentationTimes.Count - 1;
                while (insertionIndex >= 0) {
                    var aNumber = presentationTimes [insertionIndex];
                    if (aNumber <= framePTS)
                        break;
                    insertionIndex--;
                }

                if (insertionIndex + 1 == presentationTimes.Count) {
                    presentationTimes.Add (framePTS);
                    outputFrames.Add (pixelBuffer);
                } else {
                    presentationTimes.Insert (insertionIndex + 1, framePTS);
                    outputFrames.Insert (insertionIndex + 1, pixelBuffer);
                }
            }
        }
Example #28
		public unsafe void ProcessPixelBuffer (CVImageBuffer imageBuffer)
		{
			using (var pixelBuffer = imageBuffer as CVPixelBuffer)
			{
				pixelBuffer.Lock (CVOptionFlags.None);
				
				int bufferWidth = pixelBuffer.Width;
				int bufferHeight = pixelBuffer.Height;
				// offset by one to de-green the BGRA array (green is second)
				byte* pixelPtr = (byte*)pixelBuffer.BaseAddress.ToPointer () + 1;
				
				for (var row = 0; row < bufferHeight; row++){
					for (var column = 0; column < bufferWidth; column++) {
						*pixelPtr = 0;
						pixelPtr += BYTES_PER_PIXEL;
					}
				}
				
				pixelBuffer.Unlock (CVOptionFlags.None);
			}
		}
Example #29
        void DidDecompress(IntPtr sourceFrame, VTStatus status, VTDecodeInfoFlags flags, CVImageBuffer buffer, CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            if (status != VTStatus.Ok)
            {
                Console.WriteLine("Error decompresssing frame at time: {0:#.###} error: {1} infoFlags: {2}",
                                  (float)presentationTimeStamp.Value / presentationTimeStamp.TimeScale, (int)status, flags);
                return;
            }

            if (buffer == null)
            {
                return;
            }

            // Find the correct position for this frame in the output frames array
            if (presentationTimeStamp.IsInvalid)
            {
                Console.WriteLine("Not a valid time for image buffer");
                return;
            }

            var framePTS = presentationTimeStamp.Seconds;

            lock (thisLock) {
                // since we want to keep the managed `pixelBuffer` alive outside the execution
                // of the callback we need to create our own (managed) instance from the handle
                var pixelBuffer = Runtime.GetINativeObject <CVPixelBuffer> (buffer.Handle, false);

                int insertionIndex = presentationTimes.Count - 1;
                while (insertionIndex >= 0)
                {
                    var aNumber = presentationTimes [insertionIndex];
                    if (aNumber <= framePTS)
                    {
                        break;
                    }
                    insertionIndex--;
                }

                if (insertionIndex + 1 == presentationTimes.Count)
                {
                    presentationTimes.Add(framePTS);
                    outputFrames.Add(pixelBuffer);
                }
                else
                {
                    presentationTimes.Insert(insertionIndex + 1, framePTS);
                    outputFrames.Insert(insertionIndex + 1, pixelBuffer);
                }
            }
        }
Example #30
 public void OnPixelBufferReadyForDisplay(CVImageBuffer imageBuffer)
 {
     // Don't make OpenGLES calls while in the background.
     if (UIApplication.SharedApplication.ApplicationState != UIApplicationState.Background)
         oglView.DisplayPixelBuffer(imageBuffer);
 }
Example #31
			public unsafe void ProcessPixelBuffer (CVImageBuffer imageBuffer)
			{
				using (CVPixelBuffer pixelBuffer = imageBuffer as CVPixelBuffer)
				{
					pixelBuffer.Lock (CVOptionFlags.None);
					
					int bufferWidth = pixelBuffer.Width;
					int bufferHeight = pixelBuffer.Height;
					byte* pixelPtr = (byte*)pixelBuffer.BaseAddress.ToPointer();
					
					int position = 0;
					for (var row = 0; row < bufferHeight; row++){
						for (var column = 0; column < bufferWidth; column++) {
							// De-green (Second pixel in BGRA is green)
							*(pixelPtr+1) = 0;
							pixelPtr += BYTES_PER_PIXEL;
							position += BYTES_PER_PIXEL; // For each pixel increase the offset by the number of bytes per pixel
						}
					}
					
					pixelBuffer.Unlock (CVOptionFlags.None);
				}
			}
Example #32
        public void DisplayPixelBuffer(CVImageBuffer imageBuffer)
        {
            // First check to make sure we have a FrameBuffer to write to.
            if (frameBuffer == 0)
            {
                var success = CreateFrameBuffer();
                if (!success)
                {
                    Console.WriteLine("Problem initializing OpenGL buffers.");
                    return;
                }
            }

            if (videoTextureCache == null)
            {
                Console.WriteLine("Video Texture Cache not initialized");
                return;
            }

            if (!(imageBuffer is CVPixelBuffer pixelBuffer))
            {
                Console.WriteLine("Could not get Pixel Buffer from Image Buffer");
                return;
            }

            // Create a CVOpenGLESTexture from the CVImageBuffer
            var frameWidth  = (int)pixelBuffer.Width;
            var frameHeight = (int)pixelBuffer.Height;

            using (var texture = videoTextureCache.TextureFromImage(imageBuffer, true, All.Rgba, frameWidth, frameHeight, All.Bgra, DataType.UnsignedByte, 0, out CVReturn ret))
            {
                if (texture == null || ret != CVReturn.Success)
                {
                    Console.WriteLine("Could not create Texture from Texture Cache");
                    return;
                }

                GL.BindTexture(texture.Target, texture.Name);

                // Set texture parameters
                GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear);
                GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
                GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
                GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);

                GL.BindFramebuffer(FramebufferTarget.Framebuffer, frameBuffer);

                // Set the view port to the entire view
                GL.Viewport(0, 0, renderBufferWidth, renderBufferHeight);

                var squareVerticies = new float[, ]
                {
                    { -1.0F, -1.0F },
                    { 1.0F, -1.0F },
                    { -1.0F, 1.0F },
                    { 1.0F, 1.0F }
                };

                // The texture vertices are set up so that we flip the texture vertically.
                // This makes our top-left-origin buffers match OpenGL's bottom-left texture coordinate system.
                var textureSamplingRect = TextureSamplingRectForCroppingTextureWithAspectRatio(new CGSize(frameWidth, frameHeight), Bounds.Size);

                var textureVertices = new float[, ]
                {
                    { (float)textureSamplingRect.Left, (float)textureSamplingRect.Bottom },
                    { (float)textureSamplingRect.Right, (float)textureSamplingRect.Bottom },
                    { (float)textureSamplingRect.Left, (float)textureSamplingRect.Top },
                    { (float)textureSamplingRect.Right, (float)textureSamplingRect.Top }
                };

                // Draw the texture on the screen with OpenGL ES 2
                RenderWithSquareVerticies(squareVerticies, textureVertices);

                GL.BindTexture(texture.Target, texture.Name);

                // Flush the CVOpenGLESTexture cache and release the texture
                videoTextureCache.Flush(CVOptionFlags.None);
            }
        }
Example #33
 public CVOpenGLESTexture TextureFromImage(CVImageBuffer imageBuffer, bool isTexture2d, OpenTK.Graphics.ES30.All internalFormat, int width, int height, OpenTK.Graphics.ES30.All pixelFormat, OpenTK.Graphics.ES30.DataType pixelType, int planeIndex, out CVReturn errorCode)
 {
     return(TextureFromImage(imageBuffer, isTexture2d, (OpenTK.Graphics.ES20.All)internalFormat, width, height, (OpenTK.Graphics.ES20.All)pixelFormat, (OpenTK.Graphics.ES20.DataType)pixelType, planeIndex, out errorCode));
 }
Example #34
        public override void WindowControllerDidLoadNib(NSWindowController windowController)
        {
            base.WindowControllerDidLoadNib(windowController);

            // A reference to the window controller must be kept on the managed side
            // to keep the object from being GC'd so that the delegates below resolve.
            // Don't remove unless the framework is updated to track the reference.
            this.windowController = windowController;

            NSError err;

            windowController.Window.WillClose += delegate {
                if (captureSession != null)
                {
                    captureSession.StopRunning();
                }
                var dev = captureInput.Device;
                if (dev.IsOpen)
                {
                    dev.Close();
                }
            };

            // Create a movie, and store the information in memory on an NSMutableData
            movie = new QTMovie(new NSMutableData(1), out err);
            if (movie == null)
            {
                NSAlert.WithError(err).RunModal();
                return;
            }
            movieView.Movie = movie;

            // Find video device
            captureSession = new QTCaptureSession();
            var device = QTCaptureDevice.GetDefaultInputDevice(QTMediaType.Video);

            if (device == null)
            {
                new NSAlert {
                    MessageText = "You do not have a camera connected."
                }.RunModal();
                return;
            }

            if (!device.Open(out err))
            {
                NSAlert.WithError(err).RunModal();
                return;
            }

            // Add device input
            captureInput = new QTCaptureDeviceInput(device);
            if (!captureSession.AddInput(captureInput, out err))
            {
                NSAlert.WithError(err).RunModal();
                return;
            }

            // Create decompressor for video output, to get raw frames
            decompressedVideo = new QTCaptureDecompressedVideoOutput();
            decompressedVideo.DidOutputVideoFrame += delegate(object sender, QTCaptureVideoFrameEventArgs e) {
                lock (this){
                    currentImage = e.VideoFrame;
                }
            };
            if (!captureSession.AddOutput(decompressedVideo, out err))
            {
                NSAlert.WithError(err).RunModal();
                return;
            }

            // Activate preview
            captureView.CaptureSession = captureSession;

            // Start running.
            captureSession.StartRunning();
        }
Example #35
		public CVOpenGLESTexture TextureFromImage (CVImageBuffer imageBuffer, bool isTexture2d, OpenTK.Graphics.ES30.All internalFormat, int width, int height, OpenTK.Graphics.ES30.All pixelFormat, OpenTK.Graphics.ES30.DataType pixelType, int planeIndex, out CVReturn errorCode)
		{
			return TextureFromImage (imageBuffer, isTexture2d, (OpenTK.Graphics.ES20.All) internalFormat, width, height, (OpenTK.Graphics.ES20.All) pixelFormat, (OpenTK.Graphics.ES20.DataType) pixelType, planeIndex, out errorCode);
		}
Example #36
		public void DisplayPixelBuffer (CVImageBuffer imageBuffer)
		{
			// First check to make sure we have a FrameBuffer to write to.
			if (FrameBuffer == 0) {
				var success = CreateFrameBuffer ();
				if (!success) {
					Console.WriteLine ("Problem initializing OpenGL buffers.");
					return;
				}
			}
			
			if (videoTextureCache == null){
				Console.WriteLine("Video Texture Cache not initialized");
				return;
			}
			
			var pixelBuffer = imageBuffer as CVPixelBuffer;
			if (pixelBuffer == null){
				Console.WriteLine ("Could not get Pixel Buffer from Image Buffer");
				return;
			}
			
			// Create a CVOpenGLESTexture from the CVImageBuffer
			var frameWidth = pixelBuffer.Width;
			var frameHeight = pixelBuffer.Height;
			CVReturn ret;
			using (var texture =  videoTextureCache.TextureFromImage(imageBuffer, true, All.Rgba, frameWidth, frameHeight, All.Bgra, DataType.UnsignedByte, 0, out ret)){
				if (texture == null || ret != CVReturn.Success){
					Console.WriteLine ("Could not create Texture from Texture Cache");
					return;
				}
				GL.BindTexture (texture.Target, texture.Name);
			
				// Set texture parameters
				GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear);
				GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
				GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
				GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);
			
				GL.BindFramebuffer (FramebufferTarget.Framebuffer, FrameBuffer);
			
				// Set the view port to the entire view
				GL.Viewport (0, 0, renderBufferWidth, renderBufferHeight);
			
				var squareVerticies = new float[,] {
					 { -1.0F, -1.0F},
					 { 1.0F, -1.0F },
					 { -1.0F, 1.0F },
					 { 1.0F, 1.0F }
				};
				
				// The texture vertices are set up so that we flip the texture vertically.
				// This makes our top-left-origin buffers match OpenGL's bottom-left texture coordinate system.
				var textureSamplingRect = TextureSamplingRectForCroppingTextureWithAspectRatio (new SizeF (frameWidth, frameHeight), this.Bounds.Size);
				var textureVertices = new float[,]
				{
					{textureSamplingRect.Left, textureSamplingRect.Bottom},
					{textureSamplingRect.Right, textureSamplingRect.Bottom},
					{textureSamplingRect.Left, textureSamplingRect.Top},
					{textureSamplingRect.Right, textureSamplingRect.Top}
				};
				
				// Draw the texture on the screen with OpenGL ES 2
				RenderWithSquareVerticies (squareVerticies, textureVertices);
			
				GL.BindTexture (texture.Target, texture.Name);
			
				// Flush the CVOpenGLESTexture cache and release the texture
				videoTextureCache.Flush (CVOptionFlags.None);
			}
		}
Example #37
		public CVOpenGLESTexture TextureFromImage (CVImageBuffer imageBuffer, bool isTexture2d, OpenTK.Graphics.ES20.All internalFormat, int width, int height, OpenTK.Graphics.ES20.All pixelFormat, OpenTK.Graphics.ES20.DataType pixelType, int planeIndex, out CVReturn errorCode)
		{
			if (imageBuffer == null)
				throw new ArgumentNullException ("imageBuffer");
			
			int target = isTexture2d ? 0x0DE1 /* GL_TEXTURE_2D */ : 0x8D41 /* GL_RENDERBUFFER */;
			IntPtr texture;
			errorCode = CVOpenGLESTextureCacheCreateTextureFromImage (
				IntPtr.Zero,
				handle, /* textureCache dict, one day we might add it */
				imageBuffer.Handle,
				IntPtr.Zero,
				target,
				internalFormat, width, height, pixelFormat,
				pixelType, (IntPtr) planeIndex, out texture);
			if (errorCode != 0)
				return null;
			return new CVOpenGLESTexture (texture);
		}