private static void Initialise()
{
    _testPattern = new Bitmap(TEST_PATTERN_IMAGE_PATH);

    _vp8Encoder = new Vp8Codec();
    _vp8Encoder.InitialiseEncoder((uint)_testPattern.Width, (uint)_testPattern.Height);

    _vp8Decoder = new Vp8Codec();
    _vp8Decoder.InitialiseDecoder();
}
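// Illustrative usage (a sketch, not part of this class). Assumes the test pattern's
// pixels have been extracted as a 24 bit RGB buffer, named testPatternRgb24 here
// (e.g. via System.Drawing LockBits), and converted with the same
// PixelConverter.RGBtoI420 and Vp8Codec.Encode calls used elsewhere in this file:
//
// Initialise();
// byte[] i420 = PixelConverter.RGBtoI420(testPatternRgb24, _testPattern.Width, _testPattern.Height);
// byte[] encoded = _vp8Encoder.Encode(i420, true); // true forces a key frame for the first sample.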
public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
{
    if (!_isClosed)
    {
        if (_vp8Encoder == null)
        {
            _vp8Encoder = new Vp8Codec();
            _vp8Encoder.InitialiseEncoder((uint)width, (uint)height);
        }

        if (_encodeBmp == null)
        {
            _encodeBmp = new SoftwareBitmap(BitmapPixelFormat.Rgba8, width, height);
        }

        if (OnVideoSourceEncodedSample != null)
        {
            //byte[] i420Buffer = PixelConverter.RGBtoI420(rgb24Sample, width, height);
            //byte[] encodedBuffer = _vp8Encoder.Encode(i420Buffer, _forceKeyFrame);

            // Copy the raw sample into the reusable RGBA bitmap and convert it to NV12,
            // the pixel format the encoder is fed below.
            SetBitmapData(sample, _encodeBmp, pixelFormat);

            byte[] nv12Buffer = null;

            using (var nv12bmp = SoftwareBitmap.Convert(_encodeBmp, BitmapPixelFormat.Nv12))
            using (BitmapBuffer buffer = nv12bmp.LockBuffer(BitmapBufferAccessMode.Read))
            using (var reference = buffer.CreateReference())
            {
                unsafe
                {
                    // Get a raw pointer to the NV12 pixel data and copy it into a managed buffer.
                    byte* dataInBytes;
                    uint capacity;
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);
                    nv12Buffer = new byte[capacity];
                    Marshal.Copy((IntPtr)dataInBytes, nv12Buffer, 0, (int)capacity);
                }
            }

            byte[] encodedBuffer = _vp8Encoder.Encode(nv12Buffer, _forceKeyFrame);

            if (encodedBuffer != null)
            {
                //Console.WriteLine($"encoded buffer: {encodedBuffer.HexStr()}");

                // Convert the frame duration to an RTP timestamp increment, e.g. with a
                // 90 kHz sampling rate a 30 fps frame spans 90000 / 30 = 3000 timestamp units.
                uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
                uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;

                OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
            }

            if (_forceKeyFrame)
            {
                _forceKeyFrame = false;
            }
        }
    }
}
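// Illustrative usage (a sketch, not part of this class). Feeds one externally sourced
// raw frame through the method above. The handler signature matches the two argument
// Invoke call above, "rgbFrame" is a hypothetical 640x480 raw buffer, and
// VideoPixelFormatsEnum.Rgb is assumed to be an available member of the enum:
//
// var videoEndPoint = new WindowsVideoEndPoint(encodingOnly: true);
// videoEndPoint.OnVideoSourceEncodedSample += (durationRtpUnits, encodedSample) =>
// {
//     // Forward the VP8 encoded frame, e.g. to an RTP session.
// };
// videoEndPoint.ExternalVideoSourceRawSample(33, 640, 480, rgbFrame, VideoPixelFormatsEnum.Rgb);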
/// <summary>
/// Creates a new video end point that decodes and renders video from a remote party and,
/// if an external video source is supplied, encodes and forwards its raw samples.
/// </summary>
/// <param name="externalSource">Optional. The external video source to encode samples from.
/// If null then this end point will act as a video sink only.</param>
public WindowsVideoEndPoint(IVideoSource externalSource = null)
{
    if (externalSource != null)
    {
        _externalSource = externalSource;
        _externalSource.OnVideoSourceRawSample += ExternalSource_OnVideoSourceRawSample;
    }

    _vp8Decoder = new Vp8Codec();
    _vp8Decoder.InitialiseDecoder();
}
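// Illustrative usage (a sketch, not part of this class). "myVideoSource" stands in for
// any IVideoSource implementation that raises OnVideoSourceRawSample:
//
// IVideoSource myVideoSource = ...;
// var videoEndPoint = new WindowsVideoEndPoint(myVideoSource);
// // Raw samples from myVideoSource are now VP8 encoded and surfaced via the
// // OnVideoSourceEncodedSample event; received remote samples can be decoded as a sink.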
/// <summary>
/// Attempts to create a new video source from a local video capture device.
/// </summary>
/// <param name="encodingOnly">Optional. If set to true this instance will NOT attempt to initialise any
/// capture devices. It will provide encode and decode services for external video sources.</param>
/// <param name="width">Optional. If specified the video capture device will be requested to initialise with this frame
/// width. If the attempt fails an exception is thrown. If not specified the device's default frame width will
/// be used.</param>
/// <param name="height">Optional. If specified the video capture device will be requested to initialise with this frame
/// height. If the attempt fails an exception is thrown. If not specified the device's default frame height will
/// be used.</param>
/// <param name="fps">Optional. If specified the video capture device will be requested to initialise with this frame
/// rate. If the attempt fails an exception is thrown. If not specified the device's default frame rate will
/// be used.</param>
public WindowsVideoEndPoint(bool encodingOnly = false, uint width = 0, uint height = 0, uint fps = 0)
{
    _encodingOnly = encodingOnly;
    _width = width;
    _height = height;
    _fpsNumerator = fps;

    _vp8Decoder = new Vp8Codec();
    _vp8Decoder.InitialiseDecoder();

    if (!_encodingOnly)
    {
        _mediaCapture = new MediaCapture();
        _mediaCapture.Failed += VideoCaptureDevice_Failed;
    }
}
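// Illustrative usage (a sketch, not part of this class): request the local capture
// device at 1280x720 and 30 fps. As the remarks above note, device initialisation
// throws if the requested format cannot be satisfied:
//
// var videoEndPoint = new WindowsVideoEndPoint(encodingOnly: false, 1280, 720, 30);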
private void ExternalSource_OnVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] rgb24Sample)
{
    if (_vp8Encoder == null)
    {
        _vp8Encoder = new Vp8Codec();
        _vp8Encoder.InitialiseEncoder((uint)width, (uint)height);
    }

    if (OnVideoSourceEncodedSample != null)
    {
        byte[] encodedBuffer = null;

        if (_selectedSourceFormat == VideoCodecsEnum.VP8)
        {
            // VP8 requires I420 input so convert the RGB sample before encoding.
            byte[] i420Buffer = PixelConverter.RGBtoI420(rgb24Sample, width, height);
            encodedBuffer = _vp8Encoder.Encode(i420Buffer, _forceKeyFrame);
        }
        //else if (VIDEO_CODEC == SDPMediaFormatsEnum.H264)
        //{
        //    var i420Frame = _videoFrameConverter.Convert(sampleBuffer);
        //    _presentationTimestamp += VIDEO_TIMESTAMP_SPACING;
        //    i420Frame.key_frame = _forceKeyFrame ? 1 : 0;
        //    i420Frame.pts = _presentationTimestamp;
        //    encodedBuffer = _ffmpegEncoder.Encode(i420Frame);
        //}
        else
        {
            throw new ApplicationException($"Video codec {_selectedSourceFormat} is not supported.");
        }

        if (encodedBuffer != null)
        {
            //Console.WriteLine($"encoded buffer: {encodedBuffer.HexStr()}");
            OnVideoSourceEncodedSample.Invoke(_selectedSourceFormat, VIDEO_TIMESTAMP_SPACING, encodedBuffer);
        }

        if (_forceKeyFrame)
        {
            _forceKeyFrame = false;
        }
    }
}
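// Worked example for the fixed timestamp spacing used above (an assumption based on the
// standard 90 kHz RTP video clock; the VIDEO_TIMESTAMP_SPACING constant itself is defined
// elsewhere in this class): at 30 fps, VIDEO_TIMESTAMP_SPACING = 90000 / 30 = 3000 RTP
// timestamp units per frame.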
/// <summary>
/// Attempts to initialise the local video capture device.
/// </summary>
/// <param name="width">The frame width to attempt to initialise the video capture device with. Set as 0 to use default.</param>
/// <param name="height">The frame height to attempt to initialise the video capture device with. Set as 0 to use default.</param>
/// <param name="fps">The frame rate, in frames per second, to attempt to initialise the video capture device with.
/// Set as 0 to use default.</param>
/// <returns>True if the capture device was successfully initialised. An exception is thrown
/// if no compatible frame source can be found.</returns>
private async Task<bool> InitialiseDevice(uint width, uint height, uint fps)
{
    if (width == 0 && height == 0 && fps == 0)
    {
        // If no specific width, height or frame rate was requested then use the device's current settings.
        // In shared mode it's not possible to adjust the source format so if the frame is the wrong pixel
        // format it will need to be transformed on a frame by frame basis.
        var mediaCaptureSettings = new MediaCaptureInitializationSettings()
        {
            StreamingCaptureMode = StreamingCaptureMode.Video,
            SharingMode = MediaCaptureSharingMode.SharedReadOnly
        };
        //await _mediaCapture.InitializeAsync(mediaCaptureSettings).AsTask().ConfigureAwait(false);
        await _mediaCapture.InitializeAsync(mediaCaptureSettings);

        var mediaFrameSource = _mediaCapture.FrameSources.FirstOrDefault(source =>
            source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
            source.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value;

        //_mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource).AsTask().ConfigureAwait(false);
        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
        _mediaFrameSource = mediaFrameSource;
    }
    else
    {
        // If specific capture settings have been requested then the device needs to be initialised in
        // exclusive mode as the current settings and format will most likely be changed.
        var mediaCaptureSettings = new MediaCaptureInitializationSettings()
        {
            StreamingCaptureMode = StreamingCaptureMode.Video,
            SharingMode = MediaCaptureSharingMode.ExclusiveControl
        };
        await _mediaCapture.InitializeAsync(mediaCaptureSettings).AsTask().ConfigureAwait(false);

        // Prefer a frame source that supports the desired pixel format at the requested dimensions and frame rate.
        var mediaFrameSource = _mediaCapture.FrameSources.FirstOrDefault(source =>
            source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
            source.Value.Info.SourceKind == MediaFrameSourceKind.Color &&
            source.Value.SupportedFormats.Any(x =>
                x.Subtype == VIDEO_DESIRED_PIXEL_FORMAT &&
                (_width == 0 || x.VideoFormat.Width == _width) &&
                (_height == 0 || x.VideoFormat.Height == _height) &&
                (_fpsNumerator == 0 || x.FrameRate.Numerator == _fpsNumerator))).Value;

        if (mediaFrameSource == null)
        {
            // Fallback to accepting any pixel format and use a software transform on each frame.
            mediaFrameSource = _mediaCapture.FrameSources.FirstOrDefault(source =>
                source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
                source.Value.Info.SourceKind == MediaFrameSourceKind.Color &&
                source.Value.SupportedFormats.Any(x =>
                    (_width == 0 || x.VideoFormat.Width == _width) &&
                    (_height == 0 || x.VideoFormat.Height == _height) &&
                    (_fpsNumerator == 0 || x.FrameRate.Numerator == _fpsNumerator))).Value;
        }

        if (mediaFrameSource == null)
        {
            throw new ApplicationException("The video capture device does not support a compatible video format for the requested parameters.");
        }

        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource).AsTask().ConfigureAwait(false);

        // If there's a format that matches the desired pixel format set it.
        var idealFormat = mediaFrameSource.SupportedFormats.FirstOrDefault(x =>
            x.Subtype == VIDEO_DESIRED_PIXEL_FORMAT &&
            (_width == 0 || x.VideoFormat.Width == _width) &&
            (_height == 0 || x.VideoFormat.Height == _height) &&
            (_fpsNumerator == 0 || x.FrameRate.Numerator == _fpsNumerator));

        if (idealFormat != null)
        {
            await mediaFrameSource.SetFormatAsync(idealFormat).AsTask().ConfigureAwait(false);
        }

        _mediaFrameSource = mediaFrameSource;
    }

    // Frame source and format have now been successfully set.
    _width = _mediaFrameSource.CurrentFormat.VideoFormat.Width;
    _height = _mediaFrameSource.CurrentFormat.VideoFormat.Height;
    _fpsNumerator = _mediaFrameSource.CurrentFormat.FrameRate.Numerator;
    _fpsDenominator = _mediaFrameSource.CurrentFormat.FrameRate.Denominator;

    _vp8Encoder = new Vp8Codec();
    _vp8Encoder.InitialiseEncoder(_width, _height);

    _mediaFrameReader.FrameArrived += FrameArrivedHandler;
    _mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;

    return true;
}
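// Illustrative usage (a sketch, not part of this class). InitialiseDevice is private,
// so it would typically be awaited from a start method on this class before the frame
// reader begins delivering frames to FrameArrivedHandler:
//
// await InitialiseDevice(_width, _height, _fpsNumerator);
// var startResult = await _mediaFrameReader.StartAsync();
// if (startResult != MediaFrameReaderStartStatus.Success)
// {
//     throw new ApplicationException($"Failed to start media frame reader, status {startResult}.");
// }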