private static void OnVideoResolutionChangedEvent(uint width, uint height, uint stride)
{
    try
    {
        if (_vpxEncoder == null ||
            _vpxEncoder.GetWidth() != width ||
            _vpxEncoder.GetHeight() != height ||
            _vpxEncoder.GetStride() != stride)
        {
            _vpxEncoderReady = false;

            _vpxEncoder?.Dispose();
            _vpxEncoder = new VpxEncoder();
            _vpxEncoder.InitEncoder(width, height, stride);

            logger.LogInformation($"VPX encoder initialised with width {width}, height {height} and stride {stride}.");

            _vpxEncoderReady = true;
        }
    }
    catch (Exception excp)
    {
        logger.LogWarning("Exception MfSampleGrabber_OnVideoResolutionChangedEvent. " + excp.Message);
    }
}
public TestPatternVideoSource(string testPatternSource, int framesPerSecond)
{
    _testPatternPath = testPatternSource;
    _framesPerSecond = (framesPerSecond > 0 && framesPerSecond <= DEFAULT_FRAMES_PER_SECOND) ? framesPerSecond : DEFAULT_FRAMES_PER_SECOND;
    // Use the validated frame rate, not the raw parameter, to avoid a divide-by-zero
    // and to respect the clamping applied above.
    _samplePeriod = 1000 / _framesPerSecond;

    if (!String.IsNullOrEmpty(testPatternSource) && !File.Exists(testPatternSource))
    {
        logger.LogWarning($"Requested test pattern file could not be found {testPatternSource}.");
        // Fall back to the default test pattern below rather than attempting to load a missing file.
        _testPatternPath = null;
    }

    if (_testPatternPath == null)
    {
        if (!File.Exists(FALLBACK_TEST_PATTERN_IMAGE_PATH))
        {
            throw new ApplicationException($"The fallback test pattern image file could not be found {FALLBACK_TEST_PATTERN_IMAGE_PATH}.");
        }
        else
        {
            _testPatternPath = FALLBACK_TEST_PATTERN_IMAGE_PATH;
        }
    }

    logger.LogDebug($"Loading test pattern from {_testPatternPath}.");

    _testPattern = new Bitmap(_testPatternPath);

    // Get the stride by briefly locking the bitmap's pixel data.
    Rectangle rect = new Rectangle(0, 0, _testPattern.Width, _testPattern.Height);
    System.Drawing.Imaging.BitmapData bmpData = _testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, _testPattern.PixelFormat);

    _width = (uint)_testPattern.Width;
    _height = (uint)_testPattern.Height;
    _stride = (uint)bmpData.Stride;

    _testPattern.UnlockBits(bmpData);

    // Initialise the video codec and color converter.
    _vpxEncoder = new VpxEncoder();
    _vpxEncoder.InitEncoder(_width, _height, _stride);

    _colorConverter = new ImageConvert();
}
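// The LockBits/UnlockBits stride-retrieval pattern in the constructor above recurs in
// several of the methods below. A minimal helper sketch, assuming System.Drawing is
// available; GetBitmapStride is a hypothetical name, not part of the original source.
private static int GetBitmapStride(Bitmap bitmap)
{
    Rectangle rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
    // ReadOnly access is sufficient since the pixel data is only inspected, not modified.
    System.Drawing.Imaging.BitmapData bmpData = bitmap.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadOnly, bitmap.PixelFormat);
    try
    {
        // Stride is the number of bytes per scan line, including any alignment padding.
        return bmpData.Stride;
    }
    finally
    {
        bitmap.UnlockBits(bmpData);
    }
}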
/// <summary>
/// Creates and initialises a new VPX encoder for the supplied frame dimensions.
/// </summary>
/// <param name="width">The video frame width.</param>
/// <param name="height">The video frame height.</param>
/// <param name="stride">The video frame stride.</param>
/// <returns>An initialised VPX encoder.</returns>
private VpxEncoder InitialiseVpxEncoder(uint width, uint height, uint stride)
{
    try
    {
        var vpxEncoder = new VpxEncoder();
        vpxEncoder.InitEncoder(width, height, stride);

        logger.LogInformation($"VPX encoder initialised with width {width}, height {height} and stride {stride}.");

        return vpxEncoder;
    }
    catch (Exception excp)
    {
        logger.LogWarning("Exception InitialiseVpxEncoder. " + excp.Message);
        throw;
    }
}
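// A sketch of how the factory method above could replace the inline re-initialisation
// in the resolution-changed handler shown earlier. The _vpxEncoder and _vpxEncoderReady
// fields are assumed to exist as in that handler.
private void OnVideoResolutionChanged(uint width, uint height, uint stride)
{
    if (_vpxEncoder == null || _vpxEncoder.GetWidth() != width ||
        _vpxEncoder.GetHeight() != height || _vpxEncoder.GetStride() != stride)
    {
        _vpxEncoderReady = false;
        _vpxEncoder?.Dispose();
        _vpxEncoder = InitialiseVpxEncoder(width, height, stride);
        _vpxEncoderReady = true;
    }
}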
/// <summary>
/// Used when the video source is originating as bitmaps produced locally. For example
/// the audio scope generates bitmaps in response to an audio signal. The generated bitmaps
/// then need to be encoded and transmitted to the remote party.
/// </summary>
/// <param name="bmp">The locally generated bitmap to transmit to the remote party.</param>
private void LocalBitmapAvailable(Bitmap bmp)
{
    if (_vpxEncoder == null)
    {
        _extBmpWidth = bmp.Width;
        _extBmpHeight = bmp.Height;
        _extBmpStride = (int)VideoUtils.GetStride(bmp);

        _vpxEncoder = new VpxEncoder();
        int res = _vpxEncoder.InitEncoder((uint)bmp.Width, (uint)bmp.Height, (uint)_extBmpStride);
        if (res != 0)
        {
            throw new ApplicationException("VPX encoder initialisation failed.");
        }

        _imgEncConverter = new ImageConvert();
    }

    var sampleBuffer = VideoUtils.BitmapToRGB24(bmp);

    unsafe
    {
        fixed (byte* p = sampleBuffer)
        {
            byte[] convertedFrame = null;
            _imgEncConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, _extBmpWidth, _extBmpHeight, _extBmpStride, VideoSubTypesEnum.I420, ref convertedFrame);

            fixed (byte* q = convertedFrame)
            {
                byte[] encodedBuffer = null;
                int encodeResult = _vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);
                if (encodeResult != 0)
                {
                    throw new ApplicationException("VPX encode of video sample failed.");
                }

                base.SendVp8Frame(_rtpVideoTimestampPeriod, (int)SDPMediaFormatsEnum.VP8, encodedBuffer);
            }
        }
    }

    bmp.Dispose();
}
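// The BGR24 -> I420 -> VP8 pipeline above is repeated in most of the methods in this
// section. A sketch of it extracted into a reusable helper; EncodeBgr24ToVp8 is a
// hypothetical name, and the encoder and converter are assumed to already be
// initialised for the frame's dimensions and stride.
private static unsafe byte[] EncodeBgr24ToVp8(byte[] bgr24, int width, int height, int stride,
    ImageConvert converter, VpxEncoder vpxEncoder)
{
    // Convert the packed BGR24 pixels to the I420 planar format the encoder expects.
    byte[] i420 = null;
    fixed (byte* p = bgr24)
    {
        converter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, width, height, stride, VideoSubTypesEnum.I420, ref i420);
    }

    // Encode the I420 frame as VP8.
    byte[] encodedBuffer = null;
    fixed (byte* q = i420)
    {
        if (vpxEncoder.Encode(q, i420.Length, 1, ref encodedBuffer) != 0)
        {
            throw new ApplicationException("VPX encode of video sample failed.");
        }
    }

    return encodedBuffer;
}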
private async Task ExpireConnections()
{
    try
    {
        logger.LogDebug("Starting expire connections thread.");

        byte[] encodedBuffer = null;

        if (File.Exists(_expiredImagePath))
        {
            Bitmap expiredImage = new Bitmap(_expiredImagePath);

            // Get the stride.
            Rectangle rect = new Rectangle(0, 0, expiredImage.Width, expiredImage.Height);
            System.Drawing.Imaging.BitmapData bmpData = expiredImage.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, expiredImage.PixelFormat);
            int stride = bmpData.Stride;
            expiredImage.UnlockBits(bmpData);

            // Initialise the video codec and color converter.
            SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
            vpxEncoder.InitEncoder((uint)expiredImage.Width, (uint)expiredImage.Height, (uint)stride);

            SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

            byte[] sampleBuffer = BitmapToRGB24(expiredImage);

            unsafe
            {
                fixed (byte* p = sampleBuffer)
                {
                    byte[] convertedFrame = null;
                    colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, expiredImage.Width, expiredImage.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                    fixed (byte* q = convertedFrame)
                    {
                        int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);
                        if (encodeResult != 0)
                        {
                            logger.LogWarning("VPX encode of expired image failed.");
                        }
                    }
                }
            }

            expiredImage.Dispose();
            vpxEncoder.Dispose();
        }

        while (!_exit)
        {
            foreach (var conn in _webRtcConnections.Where(x => DateTime.Now.Subtract(x.Value.CreatedAt).TotalSeconds > _connectionTimeLimitSeconds).Select(x => x.Value))
            {
                OnMp4MediaSampleReady -= conn.SendMedia;
                OnTestPatternSampleReady -= conn.SendMedia;

                if (conn.WebRtcSession.IsDtlsNegotiationComplete && !conn.WebRtcSession.IsClosed && encodedBuffer != null)
                {
                    // Send the expired frame 3 times as a crude attempt to cope with packet loss.
                    conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                    await Task.Delay(1);
                    conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                    await Task.Delay(1);
                    conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                }

                conn.WebRtcSession.Close("expired");
            }

            await Task.Delay(1000);
        }
    }
    catch (Exception excp)
    {
        logger.LogError("Exception ExpireConnections. " + excp);
    }
}
private async void SampleTestPattern()
{
    try
    {
        logger.LogDebug("Starting test pattern sampling thread.");

        _isTestPatternSampling = true;

        Bitmap testPattern = new Bitmap(_testPatternImagePath);

        // Get the stride.
        Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
        System.Drawing.Imaging.BitmapData bmpData = testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, testPattern.PixelFormat);
        int stride = bmpData.Stride;
        testPattern.UnlockBits(bmpData);

        // Initialise the video codec and color converter.
        SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
        vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

        SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

        byte[] sampleBuffer = null;
        byte[] encodedBuffer = null;
        int sampleCount = 0;
        uint rtpTimestamp = 0;

        while (!_exit)
        {
            if (OnTestPatternSampleReady == null)
            {
                logger.LogDebug("No active clients, test pattern sampling paused.");
                break;
            }
            else
            {
                var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;
                AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                unsafe
                {
                    fixed (byte* p = sampleBuffer)
                    {
                        byte[] convertedFrame = null;
                        colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                        fixed (byte* q = convertedFrame)
                        {
                            int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);
                            if (encodeResult != 0)
                            {
                                logger.LogWarning("VPX encode of video sample failed.");
                                // Dispose of the stamped copy before skipping this frame to avoid a leak.
                                stampedTestPattern.Dispose();
                                continue;
                            }
                        }
                    }

                    stampedTestPattern.Dispose();

                    OnTestPatternSampleReady?.Invoke(SDPMediaTypesEnum.video, rtpTimestamp, encodedBuffer);

                    sampleCount++;
                    rtpTimestamp += VP8_TIMESTAMP_SPACING;
                }

                await Task.Delay(30);
            }
        }

        testPattern.Dispose();
        vpxEncoder.Dispose();
    }
    catch (Exception excp)
    {
        logger.LogError("Exception SampleTestPattern. " + excp);
    }
    finally
    {
        logger.LogDebug("Test pattern sampling thread stopped.");
        _isTestPatternSampling = false;
    }
}
private static void SendTestPattern()
{
    try
    {
        unsafe
        {
            Bitmap testPattern = new Bitmap(TEST_PATTERN_IMAGE_PATH);

            // Get the stride.
            Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
            System.Drawing.Imaging.BitmapData bmpData = testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, testPattern.PixelFormat);
            int stride = bmpData.Stride;
            testPattern.UnlockBits(bmpData);

            // Initialise the video codec and color converter.
            SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
            vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

            SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

            byte[] sampleBuffer = null;
            byte[] encodedBuffer = null;
            int sampleCount = 0;
            uint rtpTimestamp = 0;

            while (!_exit)
            {
                if (OnTestPatternSampleReady != null)
                {
                    var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;
                    AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                    sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                    fixed (byte* p = sampleBuffer)
                    {
                        byte[] convertedFrame = null;
                        colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                        fixed (byte* q = convertedFrame)
                        {
                            int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);
                            if (encodeResult != 0)
                            {
                                logger.LogWarning("VPX encode of video sample failed.");
                                // Dispose of the stamped copy before skipping this frame to avoid a leak.
                                stampedTestPattern.Dispose();
                                continue;
                            }
                        }
                    }

                    stampedTestPattern.Dispose();
                    stampedTestPattern = null;

                    OnTestPatternSampleReady(rtpTimestamp, encodedBuffer);

                    encodedBuffer = null;
                    sampleCount++;
                    rtpTimestamp += VP8_TIMESTAMP_SPACING;
                }

                Thread.Sleep(30);
            }
        }
    }
    catch (Exception excp)
    {
        logger.LogError("Exception SendTestPattern. " + excp);
    }
}
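// Both test pattern loops above advance the RTP timestamp by VP8_TIMESTAMP_SPACING per
// frame. A sketch of how that constant relates to the frame rate, assuming the standard
// 90 kHz RTP clock for video (used by VP8 per RFC 7741); GetTimestampSpacing is a
// hypothetical helper, not part of the original source.
private const int RTP_VIDEO_CLOCK_RATE = 90000;

private static uint GetTimestampSpacing(int framesPerSecond)
{
    // E.g. 30 fps => 90000 / 30 = 3000 ticks between frames.
    return (uint)(RTP_VIDEO_CLOCK_RATE / framesPerSecond);
}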
private void SampleWebCam(MFVideoSampler videoSampler, VideoMode videoMode, CancellationTokenSource cts)
{
    try
    {
        Thread.CurrentThread.Name = "vidsampler_" + videoMode.DeviceIndex + "_" + videoMode.Width + "_" + videoMode.Height;

        var vpxEncoder = new VpxEncoder();
        // TODO: The last parameter passed to the VPX encoder init needs to be the frame stride, not the width.
        vpxEncoder.InitEncoder(Convert.ToUInt32(videoMode.Width), Convert.ToUInt32(videoMode.Height), Convert.ToUInt32(videoMode.Width));

        while (!_stop && !cts.IsCancellationRequested)
        {
            byte[] videoSample = null;
            var sample = videoSampler.GetSample(ref videoSample);

            if (sample?.HasVideoSample == true)
            {
                // This event sends the raw bitmap to the WPF UI.
                OnLocalVideoSampleReady?.Invoke(videoSample, videoSampler.Width, videoSampler.Height);

                // This event encodes the sample and forwards it to the RTP manager for network transmission.
                if (OnLocalVideoEncodedSampleReady != null)
                {
                    IntPtr rawSamplePtr = Marshal.AllocHGlobal(videoSample.Length);
                    Marshal.Copy(videoSample, 0, rawSamplePtr, videoSample.Length);

                    byte[] yuv = null;

                    unsafe
                    {
                        // TODO: Using the width where the stride is expected.
                        _imageConverter.ConvertRGBtoYUV((byte*)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), Convert.ToInt32(videoMode.Width), VideoSubTypesEnum.I420, ref yuv);
                    }

                    Marshal.FreeHGlobal(rawSamplePtr);

                    IntPtr yuvPtr = Marshal.AllocHGlobal(yuv.Length);
                    Marshal.Copy(yuv, 0, yuvPtr, yuv.Length);

                    byte[] encodedBuffer = null;

                    unsafe
                    {
                        vpxEncoder.Encode((byte*)yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
                    }

                    Marshal.FreeHGlobal(yuvPtr);

                    OnLocalVideoEncodedSampleReady(encodedBuffer);
                }
            }
        }

        videoSampler.Stop();
        vpxEncoder.Dispose();
    }
    catch (Exception excp)
    {
        logger.LogError($"Exception SampleWebCam. {excp.Message}");
    }
}
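// A sketch addressing the stride TODOs above: for 24 bits-per-pixel RGB with rows padded
// to a 4-byte boundary (the Windows bitmap convention), the stride can be derived from
// the width instead of passing the width itself. Whether the Media Foundation sampler
// pads its rows this way is an assumption; GetRgb24Stride is a hypothetical helper.
private static uint GetRgb24Stride(uint width)
{
    uint bytesPerRow = width * 3;    // 3 bytes per pixel for RGB24.
    return (bytesPerRow + 3) & ~3u;  // Round up to the nearest multiple of 4.
}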