Exemplo n.º 1
0
        public void SetInvalidateCallback_Replace()
        {
            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
                CMFormatDescriptionError fde;
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    var timing = new CMSampleTimingInfo();
                    CMSampleBufferError sbe;
                    using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, timing, out sbe)) {
                        int callCount = 0;
                        // Install the first invalidate callback; it should stick.
                        var result = sb.SetInvalidateCallback((CMSampleBuffer buffer) => callCount++);

                        // we cannot replace the (native) callback without getting an error (so we should not replace
                        // the managed one either, that would be confusing and make it hard to port code)
                        result = sb.SetInvalidateCallback((CMSampleBuffer buffer) => {
                            callCount--;
                            Assert.AreSame(buffer, sb, "same");
                        });
                        Assert.That(result, Is.EqualTo(CMSampleBufferError.RequiredParameterMissing), "RequiredParameterMissing");

                        // Only the first (kept) callback fires on invalidation.
                        sb.Invalidate();
                        Assert.That(callCount, Is.EqualTo(1), "1");
                    }
                }
            }
        }
 /// <summary>
 /// Convenience overload: unwraps the strongly typed pixel-buffer attributes into
 /// their underlying NSDictionary and forwards to the core factory, using the
 /// default static decompression-output trampoline.
 /// </summary>
 public static VTDecompressionSession Create(VTDecompressionOutputCallback outputCallback,
                                             CMVideoFormatDescription formatDescription,
                                             VTVideoDecoderSpecification decoderSpecification,          // hardware acceleration is default behavior on iOS. no opt-in required.
                                             CVPixelBufferAttributes destinationImageBufferAttributes)
 {
     var attributes = destinationImageBufferAttributes == null ? null : destinationImageBufferAttributes.Dictionary;
     return Create(outputCallback, formatDescription, decoderSpecification, attributes, static_newDecompressionOutputCallback);
 }
Exemplo n.º 3
0
        public void SetInvalidateCallback()
        {
            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
                CMFormatDescriptionError fde;
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    var timing = new CMSampleTimingInfo();
                    CMSampleBufferError sbe;
                    using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, timing, out sbe)) {
                        int invocations = 0;
                        // The callback must be accepted and receive the very same managed instance.
                        var result = sb.SetInvalidateCallback((CMSampleBuffer buffer) => {
                            invocations++;
                            Assert.AreSame(buffer, sb, "same");
                        });
                        Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "SetInvalidateCallback/None");

                        // First invalidation succeeds and fires the callback exactly once.
                        result = (CMSampleBufferError)sb.Invalidate();
                        Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "Invalidate/None");
                        Assert.That(invocations, Is.EqualTo(1), "1");

                        // a second call to Invalidate returns Invalidated
                        result = (CMSampleBufferError)sb.Invalidate();
                        Assert.That(result, Is.EqualTo(CMSampleBufferError.Invalidated), "Invalidated");
                    }
                }
            }
        }
Exemplo n.º 4
0
        public void SetInvalidateCallback_Null()
        {
            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
                CMFormatDescriptionError fde;
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    var timing = new CMSampleTimingInfo();
                    CMSampleBufferError sbe;
                    using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, timing, out sbe)) {
                        // ignore `null`, i.e. no crash
                        Assert.That(sb.SetInvalidateCallback(null), Is.EqualTo(CMSampleBufferError.None), "null");

                        int invocations = 0;
                        var result = sb.SetInvalidateCallback((CMSampleBuffer buffer) => {
                            invocations++;
                            Assert.AreSame(buffer, sb, "same");
                        });
                        Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "SetInvalidateCallback/None");

                        // we can reset (nullify) the callback
                        Assert.That(sb.SetInvalidateCallback(null), Is.EqualTo(CMSampleBufferError.None), "null-2");

                        // The callback was cleared, so invalidation must not fire it.
                        result = (CMSampleBufferError)sb.Invalidate();
                        Assert.That(result, Is.EqualTo(CMSampleBufferError.None), "Invalidate/None");
                        Assert.That(invocations, Is.EqualTo(0), "0");
                    }
                }
            }
        }
Exemplo n.º 5
0
        /// <summary>
        /// Binds a WebRTC media-stream track to this view: a local camera track gets a
        /// native capture-preview subview with a running capturer, any other track gets
        /// an OpenGL renderer registered on the native track.
        /// </summary>
        /// <param name="videoTrack">The track to display; its Id is matched against capture-device ModelIDs.</param>
        public void SetTrack(IMediaStreamTrack videoTrack)
        {
            var cameraDevices = Webrtc.RTCCameraVideoCapturer.CaptureDevices;

            // A track is treated as a camera track when its id matches a capture device's ModelID.
            _isCamera = cameraDevices.Any(device => device.ModelID == videoTrack.Id);

            var nativeVideoTrack = videoTrack.NativeObject as Webrtc.RTCVideoTrack;

            if (_isCamera)
            {
                _cameraView = new Webrtc.RTCCameraPreviewView();
                AddSubview(_cameraView);

                // Feed captured frames into the track's native video source.
                var nativeVideoSource = nativeVideoTrack.Source;
                /*var*/ _videoCapturer  = new Webrtc.RTCCameraVideoCapturer();
                _videoCapturer.Delegate = nativeVideoSource;

                var cameraDevice = Webrtc.RTCCameraVideoCapturer.CaptureDevices
                                   ////                .FirstOrDefault(device => device.Position == cameraType.ToNative());
                                   // Get the selected device by matching RTCMediaStreamTrack.TrackId with AVCaptureDevice.ModelID from
                                   // RTCCameraVideoCapturer.CaptureDevices list.
                                   .Single(device => device.ModelID == videoTrack.Id);

                // Debug dump of every capture format the device supports (index, size, max fps).
                var formats = Webrtc.RTCCameraVideoCapturer.SupportedFormatsForDevice(cameraDevice);
                System.Diagnostics.Debug.WriteLine($"============= Capture Formats =============== ");
                int index = 0;
                foreach (var f in formats)
                {
                    CMVideoFormatDescription desc = (CMVideoFormatDescription)f.FormatDescription;
                    var dim             = desc.Dimensions;
                    var maxSupportedFps = 0d;
                    foreach (var fpsRange in f.VideoSupportedFrameRateRanges)
                    {
                        maxSupportedFps = Math.Max(maxSupportedFps, fpsRange.MaxFrameRate);
                    }
                    System.Diagnostics.Debug.WriteLine($"index:{index++} width:{dim.Width} height:{dim.Height} fpsMax:{maxSupportedFps}");
                }


                // NOTE(review): format index 6 is hard-coded (was 0) — this throws if the
                // device reports fewer than 7 formats; confirm the intended selection.
                var format = Webrtc.RTCCameraVideoCapturer.SupportedFormatsForDevice(cameraDevice)[6 /*0*/];
                CMVideoFormatDescription videoFormatDescription = (CMVideoFormatDescription)format.FormatDescription;
                var capturerDimensions = videoFormatDescription.Dimensions;
                var capturerSize       = new CGSize(capturerDimensions.Width, capturerDimensions.Height);
                var fps = 30;
                _videoCapturer.StartCaptureWithDevice(cameraDevice, format, fps);

                _cameraView.CaptureSession = _videoCapturer.CaptureSession;
            }
            else
            {
                // Remote (non-camera) track: render through an EAGL view and register it on the track.
                _rendererView          = new Webrtc.RTCEAGLVideoView();
                _rendererView.Delegate = this;
                AddSubview(_rendererView);

                nativeVideoTrack.AddRenderer(_rendererView);
            }

            SetNeedsLayout();
        }
Exemplo n.º 6
0
        /// <summary>Smoke test: the (codec, dimensions) constructor must not throw.</summary>
        public void VideoFormatDescriptionConstructors()
        {
#if __UNIFIED__
            // Unified API: dimensions are expressed with CMVideoDimensions.
            using (var obj = new CMVideoFormatDescription(CMVideoCodecType.H264, new CMVideoDimensions(960, 540))) {
            }
#else
            // Classic API: dimensions are expressed with System.Drawing.Size.
            using (var obj = new CMVideoFormatDescription(CMVideoCodecType.H264, new System.Drawing.Size(960, 540))) {
            }
#endif
        }
Exemplo n.º 7
0
        /// <summary>
        /// Creating a sample buffer for a valid image buffer must succeed without error.
        /// </summary>
        public void CreateForImageBuffer()
        {
            // FIX: dispose the native resources with `using` — this test leaked the
            // pixel buffer, format description and sample buffer, unlike every
            // sibling test in this fixture.
            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
                CMFormatDescriptionError fde;
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    var sampleTiming = new CMSampleTimingInfo();

                    CMSampleBufferError sbe;
                    using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, sampleTiming, out sbe)) {
                        Assert.IsNotNull(sb, "#1");
                        Assert.AreEqual(CMSampleBufferError.None, sbe, "#2");
                    }
                }
            }
        }
        // HACK: Change CMFormatDescription to CMVideoFormatDescription
        /// <summary>
        /// Configures the asset writer's H.264 video input from the current format
        /// description's dimensions, choosing a bitrate from the pixel count.
        /// </summary>
        /// <param name="currentFormatDescription">Video format description of the incoming frames.</param>
        /// <returns>
        /// false only when the input could not be added to the writer; true otherwise —
        /// including when the output settings could not be applied.
        /// NOTE(review): that failure path only logs and still returns true — confirm intended.
        /// </returns>
        public bool SetupAssetWriterVideoInput(CMVideoFormatDescription currentFormatDescription)
        {
            //Console.WriteLine ("Setting up Video Asset Writer");
            float bitsPerPixel;
            // HACK: Change VideoDimensions to Dimensions, as this type was changed to CMVideoFormatDescription
            var dimensions = currentFormatDescription.Dimensions;
            int numPixels  = dimensions.Width * dimensions.Height;
            int bitsPerSecond;

            // Assume that lower-than-SD resolution are intended for streaming, and use a lower bitrate
            bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;

            bitsPerSecond = (int)(numPixels * bitsPerPixel);

            // H.264 settings: average bitrate scaled by pixel count, key frame at most every 30 frames.
            NSDictionary videoCompressionSettings = new NSDictionary(
                AVVideo.CodecKey, AVVideo.CodecH264,
                AVVideo.WidthKey, dimensions.Width,
                AVVideo.HeightKey, dimensions.Height,
                AVVideo.CompressionPropertiesKey, new NSDictionary(
                    AVVideo.AverageBitRateKey, bitsPerSecond,
                    AVVideo.MaxKeyFrameIntervalKey, 30
                    )
                );

            if (assetWriter.CanApplyOutputSettings(videoCompressionSettings, AVMediaType.Video))
            {
                // HACK: Change NSDictionary into AVVideoSettingsCompressed created using that NSDictionary (videoCompressionSettings)
                assetWriterVideoIn = new AVAssetWriterInput(AVMediaType.Video, new AVVideoSettingsCompressed(videoCompressionSettings));
                assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
                // Orient recorded video to match the reference orientation.
                assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation(ReferenceOrientation);

                if (assetWriter.CanAddInput(assetWriterVideoIn))
                {
                    assetWriter.AddInput(assetWriterVideoIn);
                }
                else
                {
                    Console.WriteLine("Couldn't add asset writer video input.");
                    return(false);
                }
            }
            else
            {
                Console.WriteLine("Couldn't apply video output settings.");
            }

            return(true);
        }
Exemplo n.º 9
0
        public void CreateReadyWithImageBuffer_ArrayValidations()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);
            TestRuntime.AssertSystemVersion(PlatformName.MacOSX, 10, 10, throwIfOtherPlatform: false);

            CMFormatDescriptionError fde;

            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB))
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    CMSampleBufferError sbe;
                    // A null timing array must be rejected outright.
                    Assert.Throws <ArgumentNullException> (() => CMSampleBuffer.CreateReadyWithImageBuffer(pixelBuffer, desc, null, out sbe), "null");

                    // An empty timing array is rejected as invalid too.
                    var emptyTimings = new CMSampleTimingInfo [0];
                    Assert.Throws <ArgumentException> (() => CMSampleBuffer.CreateReadyWithImageBuffer(pixelBuffer, desc, emptyTimings, out sbe), "empty");
                }
        }
Exemplo n.º 10
0
        public void CreateReadyWithImageBuffer()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);
            TestRuntime.AssertSystemVersion(PlatformName.MacOSX, 10, 10, throwIfOtherPlatform: false);

            CMFormatDescriptionError fde;

            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB))
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    CMSampleBufferError sbe;
                    // An empty (but non-null) timing array is acceptable for a ready sample buffer.
                    var noTimings = new CMSampleTimingInfo [0];
                    using (var sb = CMSampleBuffer.CreateReadyWithImageBuffer(pixelBuffer, desc, noTimings, out sbe)) {
                        Assert.That(sb.Handle, Is.Not.EqualTo(IntPtr.Zero), "CMSampleBuffer");
                        Assert.That(sbe, Is.EqualTo(CMSampleBufferError.None), "CMSampleBufferError");
                    }
                }
        }
Exemplo n.º 11
0
        /// <summary>
        /// Converts a sample buffer into a CGImage: JPEG-decodes the block buffer when
        /// one is present, otherwise wraps the BGRA image buffer in a bitmap context.
        /// </summary>
        /// <param name="sampleBuffer">Sample buffer holding either JPEG data or a BGRA image buffer.</param>
        /// <returns>The decoded image.</returns>
        /// <exception cref="Exception">Thrown when the data is not JPEG/BGRA encoded as required.</exception>
        private CGImage CreateImage(CMSampleBuffer sampleBuffer)
        {
            CGImage image = null;

            CMVideoFormatDescription formatDescription = sampleBuffer.GetVideoFormatDescription();
            var           subType     = formatDescription.MediaSubType;
            CMBlockBuffer blockBuffer = sampleBuffer.GetDataBuffer();

            if (blockBuffer != null)
            {
                if (subType != (int)CMVideoCodecType.JPEG)
                {
                    throw new Exception("Block buffer must be JPEG encoded.");
                }

                // Copy the compressed bytes out of the block buffer into managed-owned data.
                var jpegData = new NSMutableData();
                jpegData.Length = blockBuffer.DataLength;

                blockBuffer.CopyDataBytes(0, blockBuffer.DataLength, jpegData.Bytes);

                using (var imageSource = CGImageSource.FromData(jpegData)) {
                    var decodeOptions = new CGImageOptions {
                        ShouldAllowFloat = false,
                        ShouldCache      = false
                    };

                    image = imageSource.CreateImage(0, decodeOptions);
                }
            }
            else
            {
                if (subType != (int)CVPixelFormatType.CV32BGRA)
                {
                    throw new Exception("Image buffer must be BGRA encoded.");
                }

                CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer();

                // NOTE(review): the CVImageBuffer *handle* is passed as the bitmap data
                // pointer and bytesPerRow is 0 — confirm this is the intended way to
                // reach the pixel data (locking the base address is more typical).
                using (var colorSpace = CGColorSpace.CreateDeviceRGB())
                    using (var bitmapContext = new CGBitmapContext(imageBuffer.Handle,
                                                                   (int)imageBuffer.DisplaySize.Width, (int)imageBuffer.DisplaySize.Height, 8, 0, colorSpace, CGImageAlphaInfo.NoneSkipFirst)) {
                        image = bitmapContext.ToImage();
                    }
            }

            return(image);
        }
Exemplo n.º 12
0
        public void CreateReadyWithImageBuffer()
        {
            // Requires the iOS 8 SDK; report inconclusive (not failed) on older systems.
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Inconclusive("Requires iOS8");
            }

            CMFormatDescriptionError fde;

            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB))
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    CMSampleBufferError sbe;
                    // An empty (but non-null) timing array is acceptable for a ready sample buffer.
                    var noTimings = new CMSampleTimingInfo [0];
                    using (var sb = CMSampleBuffer.CreateReadyWithImageBuffer(pixelBuffer, desc, noTimings, out sbe)) {
                        Assert.That(sb.Handle, Is.Not.EqualTo(IntPtr.Zero), "CMSampleBuffer");
                        Assert.That(sbe, Is.EqualTo(CMSampleBufferError.None), "CMSampleBufferError");
                    }
                }
        }
Exemplo n.º 13
0
        /// <summary>
        /// Factory for test instances of attachment-bearing CoreMedia types,
        /// dispatched on the requested type's simple name.
        /// </summary>
        /// <param name="t">A type implementing the CM attachment interface (checked against CMAttachmentInterfaceType).</param>
        /// <returns>A freshly created CMBlockBuffer or CMSampleBuffer.</returns>
        /// <exception cref="ArgumentException">Thrown when t does not implement the attachment interface.</exception>
        /// <exception cref="InvalidOperationException">Thrown when native creation fails or t is unsupported.</exception>
        protected ICMAttachmentBearer GetInstance(Type t)
        {
            if (!CMAttachmentInterfaceType.IsAssignableFrom(t))
            {
                throw new ArgumentException("t");
            }
            switch (t.Name)
            {
            case "CMBlockBuffer":
                // Empty block buffer with its memory allocated eagerly.
                CMBlockBufferError bbe;
                var result = CMBlockBuffer.CreateEmpty(0, CMBlockBufferFlags.AssureMemoryNow, out bbe);
                if (bbe == CMBlockBufferError.None)
                {
                    return(result);
                }
                else
                {
                    throw new InvalidOperationException(string.Format("Could not create the new instance {0}.", bbe.ToString()));
                }

            case "CMSampleBuffer":
                // Sample buffer backed by a small RGB pixel buffer and a default timing info.
                var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB);

                CMFormatDescriptionError fde;
                var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde);

                var sampleTiming = new CMSampleTimingInfo();

                CMSampleBufferError sbe;
                var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, sampleTiming, out sbe);
                if (sbe == CMSampleBufferError.None)
                {
                    return(sb);
                }
                else
                {
                    throw new InvalidOperationException(string.Format("Could not create the new instance {0}.", sbe.ToString()));
                }

            default:
                throw new InvalidOperationException(string.Format("Could not create the new instance for type {0}.", t.Name));
            }
        }
Exemplo n.º 14
0
        /// <summary>
        /// Round-trips HEVC parameter sets through
        /// CMVideoFormatDescription.FromHevcParameterSets / GetHevcParameterSet and
        /// checks the derived 1280x720 dimensions.
        /// </summary>
        public void HevcParameterSetsTest()
        {
            TestRuntime.AssertXcodeVersion(9, 0);

            // Three raw HEVC parameter-set NAL units (presumably VPS/SPS/PPS from the
            // leading bytes 0x40/0x42/0x44 — not asserted here; TODO confirm).
            var arr0 = new byte [] { 0x40, 0x01, 0x0C, 0x06, 0xFF, 0xFF, 0x01, 0x60, 0x00, 0x00, 0x03, 0x00, 0xB0, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, 0x00, 0x5D, 0x00, 0x00, 0x15, 0xC0, 0x90 };
            var arr1 = new byte [] { 0x42, 0x01, 0x06, 0x01, 0x60, 0x00, 0x00, 0x03, 0x00, 0xB0, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, 0x00, 0x5D, 0x00, 0x00, 0xA0, 0x02, 0x80, 0x80, 0x2D, 0x16, 0x20, 0x57, 0xB9, 0x16, 0x41, 0x57, 0x20, 0x92, 0x7E, 0x84, 0x95, 0x4D, 0x69, 0x94, 0x92, 0x7E, 0x84, 0x95, 0x4D, 0x69, 0x9C, 0x92, 0x4B, 0x95, 0x4F, 0xA9, 0x49, 0x3E, 0x49, 0xD4, 0x93, 0xEA, 0x72, 0x49, 0x2B, 0x92, 0x5C, 0x97, 0xA9, 0xB8, 0x08, 0x08, 0x35, 0x20, 0x10 };
            var arr2 = new byte [] { 0x44, 0x01, 0xC0, 0x2C, 0xBC, 0x14, 0xC9 };

            var props = new List <byte []> {
                arr0, arr1, arr2
            };
            CMFormatDescriptionError error;
            var desc = CMVideoFormatDescription.FromHevcParameterSets(props, 4, null, out error);

            // Dead assignment: the native side has copied the data; presumably kept to
            // show the description does not retain the managed list.
            props = null;
            Assert.That(error == CMFormatDescriptionError.None, "HevcParameterSetsTest 1");
            Assert.NotNull(desc, "HevcParameterSetsTest 2");
            Assert.That(desc.Dimensions.Height == 720 && desc.Dimensions.Width == 1280, "HevcParameterSetsTest 3");

            // Each parameter set must round-trip byte-for-byte, with the NAL-unit
            // header length (4) and the set count (3) reported back.
            CMFormatDescriptionError err;
            nuint paramCount;
            int   nalCount;
            var   bytes = desc.GetHevcParameterSet(0, out paramCount, out nalCount, out err);

            Assert.That(err == CMFormatDescriptionError.None, "HevcParameterSetsTest arr0 1");
            Assert.NotNull(bytes, "HevcParameterSetsTest arr0 2");
            Assert.True(nalCount == 4 && paramCount == 3);
            Assert.That(arr0, Is.EqualTo(bytes), "HevcParameterSetsTest arr0 roundtrip");

            bytes = desc.GetHevcParameterSet(1, out paramCount, out nalCount, out err);
            Assert.That(err == CMFormatDescriptionError.None, "HevcParameterSetsTest arr1 1");
            Assert.NotNull(bytes, "HevcParameterSetsTest arr1 2");
            Assert.True(nalCount == 4 && paramCount == 3);
            Assert.That(arr1, Is.EqualTo(bytes), "HevcParameterSetsTest arr1 roundtrip");

            bytes = desc.GetHevcParameterSet(2, out paramCount, out nalCount, out err);
            Assert.That(err == CMFormatDescriptionError.None, "HevcParameterSetsTest arr2 1");
            Assert.NotNull(bytes, "HevcParameterSetsTest arr2 2");
            Assert.True(nalCount == 4 && paramCount == 3);
            Assert.That(arr2, Is.EqualTo(bytes), "HevcParameterSetsTest arr2 roundtrip");
        }
Exemplo n.º 15
0
        /// <summary>
        /// Creates a decompression session without a managed output callback,
        /// passing a zeroed callback record to the native API.
        /// </summary>
        public static VTDecompressionSession Create(CMVideoFormatDescription formatDescription,
                                                    VTVideoDecoderSpecification decoderSpecification = null, // hardware acceleration is default behavior on iOS. no opt-in required.
                                                    NSDictionary destinationImageBufferAttributes    = null) // Undocumented options, probably always null
        {
            if (formatDescription == null)
            {
                throw new ArgumentNullException("formatDescription");
            }

            var callbackStruct = default(VTDecompressionOutputCallbackRecord);

            // Resolve optional parameters to native handles up front.
            var specHandle  = decoderSpecification == null ? IntPtr.Zero : decoderSpecification.Dictionary.Handle;
            var attrsHandle = destinationImageBufferAttributes == null ? IntPtr.Zero : destinationImageBufferAttributes.Handle;

            IntPtr ret;
            var status = VTDecompressionSessionCreate(IntPtr.Zero, formatDescription.Handle,
                                                      specHandle,
                                                      attrsHandle,
                                                      ref callbackStruct,
                                                      out ret);

            if (status != VTStatus.Ok || ret == IntPtr.Zero)
            {
                return null;
            }

            return new VTDecompressionSession(ret, true);
        }
Exemplo n.º 16
0
        /// <summary>
        /// Round-trips H.264 SPS/PPS parameter sets through
        /// CMVideoFormatDescription.FromH264ParameterSets / GetH264ParameterSet and
        /// checks the derived 1920x1080 dimensions.
        /// </summary>
        public void H264ParameterSetsTest()
        {
            if (!TestRuntime.CheckXcodeVersion(5, 0, 1))
            {
                Assert.Inconclusive("CMVideoFormatDescription.FromH264ParameterSets is iOS7+ and macOS 10.9+");
            }

            // Raw H.264 parameter sets (presumably arr0 = SPS, arr1 = PPS — TODO confirm).
            var arr0 = new byte[] { 0x67, 0x64, 0x00, 0x29, 0xAC, 0x56, 0x80, 0x78, 0x02, 0x27, 0xE5, 0x9A, 0x80, 0x80, 0x80, 0x81 };
            var arr1 = new byte[] { 0x28, 0xEE, 0x04, 0xF2, 0xC0 };

            var props = new List <byte[]> {
                arr0, arr1
            };
            CMFormatDescriptionError error;
            var desc = CMVideoFormatDescription.FromH264ParameterSets(props, 4, out error);

            // Dead assignment: the native side has copied the data; presumably kept to
            // show the description does not retain the managed list.
            props = null;
            Assert.That(error == CMFormatDescriptionError.None, "H264ParameterSetsTest");
            Assert.NotNull(desc, "H264ParameterSetsTest");
            Assert.That(desc.Dimensions.Height == 1080 && desc.Dimensions.Width == 1920, "H264ParameterSetsTest");

            // Each parameter set must round-trip byte-for-byte, with the NAL-unit
            // header length (4) and the set count (2) reported back.
            CMFormatDescriptionError err;
            nuint paramCount;
            int   nalCount;
            var   bytes = desc.GetH264ParameterSet(0, out paramCount, out nalCount, out err);

            Assert.That(err == CMFormatDescriptionError.None, "H264ParameterSetsTest");
            Assert.NotNull(bytes, "H264ParameterSetsTest");
            Assert.True(nalCount == 4 && paramCount == 2);
            Assert.That(arr0, Is.EqualTo(bytes), "H264ParameterSetsTest roundtrip");

            bytes = desc.GetH264ParameterSet(1, out paramCount, out nalCount, out err);
            Assert.That(err == CMFormatDescriptionError.None, "H264ParameterSetsTest");
            Assert.NotNull(bytes, "H264ParameterSetsTest");
            Assert.True(nalCount == 4 && paramCount == 2);
            Assert.That(arr1, Is.EqualTo(bytes), "H264ParameterSetsTest roundtrip");
        }
Exemplo n.º 17
0
        /// <summary>
        /// Core factory: pins the managed output callback, builds the native callback
        /// record, and creates the decompression session. Returns null (and frees the
        /// GCHandle) when native creation fails.
        /// </summary>
        static VTDecompressionSession Create(VTDecompressionOutputCallback outputCallback,
                                             CMVideoFormatDescription formatDescription,
                                             VTVideoDecoderSpecification decoderSpecification,          // hardware acceleration is default behavior on iOS. no opt-in required.
                                             NSDictionary destinationImageBufferAttributes,
                                             DecompressionOutputCallback cback)
        {
            if (formatDescription == null)
            {
                throw new ArgumentNullException("formatDescription");
            }

            // Pin the managed delegate so the native trampoline (cback) can recover it
            // from the refcon for the lifetime of the session.
            var callbackHandle = GCHandle.Alloc(outputCallback);
            var callbackStruct = new VTDecompressionOutputCallbackRecord()
            {
                Proc = cback,
                DecompressionOutputRefCon = GCHandle.ToIntPtr(callbackHandle)
            };
            IntPtr ret;

            var result = VTDecompressionSessionCreate(IntPtr.Zero, formatDescription.Handle,
                                                      decoderSpecification != null ? decoderSpecification.Dictionary.Handle : IntPtr.Zero,
                                                      destinationImageBufferAttributes != null ? destinationImageBufferAttributes.Handle : IntPtr.Zero,
                                                      ref callbackStruct,
                                                      out ret);

            if (result == VTStatus.Ok && ret != IntPtr.Zero)
            {
                // FIX: the return statement's object initializer was missing its
                // terminating semicolon (a stray empty statement followed the `if`
                // block instead), which does not compile.
                return new VTDecompressionSession(ret, true)
                {
                    callbackHandle = callbackHandle
                };
            }

            // Creation failed: release the handle so the delegate can be collected.
            callbackHandle.Free();
            return(null);
        }
Exemplo n.º 18
0
        public void CallForEachSample()
        {
            using (var pixelBuffer = new CVPixelBuffer(20, 10, CVPixelFormatType.CV24RGB)) {
                CMFormatDescriptionError fde;
                using (var desc = CMVideoFormatDescription.CreateForImageBuffer(pixelBuffer, out fde)) {
                    var timing = new CMSampleTimingInfo();
                    CMSampleBufferError sbe;
                    using (var sb = CMSampleBuffer.CreateForImageBuffer(pixelBuffer, true, desc, timing, out sbe)) {
                        int visited = 0;
                        // The custom error returned by the callback must be propagated to the caller.
                        var result = sb.CallForEachSample((CMSampleBuffer buffer, int index) => {
                            visited++;
                            Assert.AreSame(buffer, sb, "same-1");
                            return CMSampleBufferError.CannotSubdivide;
                        });
                        Assert.That(result, Is.EqualTo(CMSampleBufferError.CannotSubdivide), "custom error");
                        Assert.That(visited, Is.EqualTo(1), "1");

                        // A null callback is rejected with ArgumentNullException.
                        Assert.Throws <ArgumentNullException> (delegate {
                            sb.CallForEachSample(null);
                        }, "null");
                    }
                }
            }
        }
Exemplo n.º 19
0
 /// <summary>Smoke test: the (codec, dimensions) constructor must not throw.</summary>
 public void VideoFormatDescriptionConstructors()
 {
     using (var desc = new CMVideoFormatDescription(CMVideoCodecType.H264, new CMVideoDimensions(960, 540))) {
         // Construction succeeding without throwing is the assertion here.
     }
 }
		// HACK: Change CMFormatDescription to CMVideoFormatDescription
		/// <summary>
		/// Configures the asset writer's H.264 video input from the current format
		/// description's dimensions, choosing a bitrate from the pixel count.
		/// </summary>
		/// <param name="currentFormatDescription">Video format description of the incoming frames.</param>
		/// <returns>
		/// false only when the input could not be added to the writer; true otherwise —
		/// including when the output settings could not be applied.
		/// NOTE(review): that failure path only logs and still returns true — confirm intended.
		/// </returns>
		public bool SetupAssetWriterVideoInput (CMVideoFormatDescription currentFormatDescription)
		{
			//Console.WriteLine ("Setting up Video Asset Writer");
			float bitsPerPixel;
			// HACK: Change VideoDimensions to Dimensions, as this type was changed to CMVideoFormatDescription
			var dimensions = currentFormatDescription.Dimensions;
			int numPixels = dimensions.Width * dimensions.Height;
			int bitsPerSecond;

			// Assume that lower-than-SD resolution are intended for streaming, and use a lower bitrate
			bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;

			bitsPerSecond = (int) (numPixels * bitsPerPixel);

			// H.264 settings: average bitrate scaled by pixel count, key frame at most every 30 frames.
			NSDictionary videoCompressionSettings = new NSDictionary (
				AVVideo.CodecKey, AVVideo.CodecH264,
				AVVideo.WidthKey, dimensions.Width,
				AVVideo.HeightKey,dimensions.Height,
				AVVideo.CompressionPropertiesKey, new NSDictionary(
					AVVideo.AverageBitRateKey, bitsPerSecond,
					AVVideo.MaxKeyFrameIntervalKey, 30
				)
			);

			if (assetWriter.CanApplyOutputSettings (videoCompressionSettings, AVMediaType.Video)){
				// HACK: Change NSDictionary into AVVideoSettingsCompressed created using that NSDictionary (videoCompressionSettings)
				assetWriterVideoIn = new AVAssetWriterInput (AVMediaType.Video, new AVVideoSettingsCompressed( videoCompressionSettings));
				assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
				// Orient recorded video to match the reference orientation.
				assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation (ReferenceOrientation);

				if (assetWriter.CanAddInput (assetWriterVideoIn))
					assetWriter.AddInput (assetWriterVideoIn);
				else {
					Console.WriteLine ("Couldn't add asset writer video input.");
					return false;
				}
			} else
				Console.WriteLine ("Couldn't apply video output settings.");

			return true;
		}
Exemplo n.º 21
0
        /// <summary>
        /// AVCapture delegate callback: for video frames it updates framerate/dimension
        /// stats, processes and queues the pixel buffer for on-screen preview, then (for
        /// both audio and video) asynchronously writes samples to the asset writer on
        /// the movie-writing queue.
        /// </summary>
        /// <param name="captureOutput">The capture output that produced the sample.</param>
        /// <param name="sampleBuffer">The captured sample (video frame or audio).</param>
        /// <param name="connection">Identifies whether this is the video or audio connection.</param>
        public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            // HACK: Change CMSampleBuffer.GetFormatDescription() to CMSampleBuffer.GetVideoFormatDescription()
            // HACK Change CMFormatDescription to CMVideoFormatDescription
            // CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();
            CMVideoFormatDescription formatDescription = sampleBuffer.GetVideoFormatDescription();

            if (connection == videoConnection)
            {
                // Get framerate
                CMTime timestamp = sampleBuffer.PresentationTimeStamp;
                CalculateFramerateAtTimestamp(timestamp);

                // Get frame dimensions (for onscreen display)
                if (VideoDimensions.IsEmpty)
                {
                    // HACK: Change GetVideoPresentationDimensions() to GetPresentationDimensions()
                    // VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
                    VideoDimensions = formatDescription.GetPresentationDimensions(true, false);
                }

                // Get the buffer type
                if (VideoType == 0)
                {
                    VideoType = formatDescription.MediaSubType;
                }

                // Synchronously process the pixel buffer to de-green it.
                using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                    ProcessPixelBuffer(pixelBuffer);

                // Hand the buffer to the preview queue; the main-thread closure below dequeues it.
                previewBufferQueue.Enqueue(sampleBuffer);

                //var writeBuffer = sampleBuffer.Duplicate ();
                InvokeOnMainThread(() => {
                    INativeObject j = previewBufferQueue.Dequeue();

                    var sbuf = j as CMSampleBuffer;
                    if (sbuf == null)
                    {
#if DEBUG
                        // Record the current sampleBuffer.ClassHandle
                        // Then run another iteration and on the next one, print the ClassHandle
                        Console.WriteLine("The type is {0}", j.ToString());
#endif
                        return;
                    }

                    // Forward the frame's pixel buffer to the preview display, if anyone listens.
                    using (CVImageBuffer pixBuf = sbuf.GetImageBuffer()) {
                        if (PixelBufferReadyForDisplay != null)
                        {
                            PixelBufferReadyForDisplay(pixBuf);
                        }
                    }
                });
            }
            // keep a reference to 'sampleBuffer', movieWritingQueue will remove it
            CompleteBufferUse(sampleBuffer);

            movieWritingQueue.DispatchAsync(() => {
                if (assetWriter != null)
                {
                    bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (connection == videoConnection)
                    {
                        // Initialize the video input if this is not done yet
                        if (!readyToRecordVideo)
                        {
                            readyToRecordVideo = SetupAssetWriterVideoInput(formatDescription);
                        }

                        // Write the video data to file
                        if (readyToRecordVideo && readyToRecordAudio)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Video);
                        }
                    }
                    else if (connection == audioConnection)
                    {
                        // Lazily set up the audio input on the first audio sample.
                        if (!readyToRecordAudio)
                        {
                            readyToRecordAudio = SetupAssetWriterAudioInput(formatDescription);
                        }

                        if (readyToRecordAudio && readyToRecordVideo)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Audio);
                        }
                    }
                    // Recording effectively starts only once BOTH inputs are configured.
                    bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (!wasReadyToRecord && isReadyToRecord)
                    {
                        recordingWillBeStarted = false;
                        IsRecording            = true;

                        if (RecordingDidStart != null)
                        {
                            RecordingDidStart();
                        }
                    }
                }
                CompleteBufferUse(sampleBuffer);
            });
        }