/// <summary>
/// Timer callback that stamps the static test pattern with the current UTC time, encodes it
/// as a VP8 frame and raises the <c>SampleReady</c> event with the encoded buffer.
/// Skipped entirely when nothing is subscribed or the instance is being disposed.
/// </summary>
/// <param name="state">Timer state object; not used.</param>
public void SendTestPatternSample(object state)
{
    try
    {
        if (SampleReady != null && !_isDisposing)
        {
            // Serialise access to the encoder; SendTestPatternSample can fire re-entrantly from the timer.
            lock (_vpxEncoder)
            {
                unsafe
                {
                    byte[] encodedBuffer = null;

                    // Stamp a clone so the original test pattern image stays pristine.
                    // The using block guarantees the clone is released even if conversion
                    // or encoding throws (the original only disposed on the success path,
                    // leaking a GDI+ image on every failed tick).
                    using (var stampedTestPattern = _testPattern.Clone() as System.Drawing.Image)
                    {
                        AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");

                        byte[] sampleBuffer = VideoUtils.BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                        fixed (byte* p = sampleBuffer)
                        {
                            byte[] convertedFrame = null;
                            // The stamped bitmap is BGR24; the VP8 encoder requires I420.
                            _colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, (int)_width, (int)_height, (int)_stride, VideoSubTypesEnum.I420, ref convertedFrame);

                            fixed (byte* q = convertedFrame)
                            {
                                int encodeResult = _vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                if (encodeResult != 0)
                                {
                                    throw new ApplicationException("VPX encode of video sample failed.");
                                }
                            }
                        }
                    }

                    SampleReady?.Invoke(encodedBuffer);
                }
            }
        }
    }
    catch (Exception excp)
    {
        // Pass the exception object to the logger (rather than concatenating it into the
        // message) so the logging provider captures the full exception and stack trace.
        logger.LogError(excp, "Exception SendTestPatternSample.");
    }
}
/// <summary>
/// Used when the video source is originating as bitmaps produced locally. For example
/// the audio scope generates bitmaps in response to an audio signal. The generated bitmaps
/// then need to be encoded and transmitted to the remote party.
/// </summary>
/// <param name="bmp">The locally generated bitmap to transmit to the remote party.
/// This method takes ownership of the bitmap and always disposes it.</param>
/// <exception cref="ApplicationException">Thrown if encoder initialisation or encoding fails.</exception>
private void LocalBitmapAvailable(Bitmap bmp)
{
    try
    {
        // Lazily initialise the VP8 encoder from the first bitmap's dimensions.
        // NOTE(review): _vpxEncoder is accessed here without the lock used in
        // SendTestPatternSample — confirm the two paths cannot run concurrently.
        if (_vpxEncoder == null)
        {
            _extBmpWidth = bmp.Width;
            _extBmpHeight = bmp.Height;
            _extBmpStride = (int)VideoUtils.GetStride(bmp);

            _vpxEncoder = new VpxEncoder();
            int res = _vpxEncoder.InitEncoder((uint)bmp.Width, (uint)bmp.Height, (uint)_extBmpStride);
            if (res != 0)
            {
                throw new ApplicationException("VPX encoder initialisation failed.");
            }

            _imgEncConverter = new ImageConvert();
        }

        var sampleBuffer = VideoUtils.BitmapToRGB24(bmp);

        unsafe
        {
            fixed (byte* p = sampleBuffer)
            {
                byte[] convertedFrame = null;
                // Bitmaps arrive as BGR24; convert to I420 for the VP8 encoder.
                _imgEncConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, _extBmpWidth, _extBmpHeight, _extBmpStride, VideoSubTypesEnum.I420, ref convertedFrame);

                fixed (byte* q = convertedFrame)
                {
                    byte[] encodedBuffer = null;
                    int encodeResult = _vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                    if (encodeResult != 0)
                    {
                        throw new ApplicationException("VPX encode of video sample failed.");
                    }

                    base.SendVp8Frame(_rtpVideoTimestampPeriod, (int)SDPMediaFormatsEnum.VP8, encodedBuffer);
                }
            }
        }
    }
    finally
    {
        // Always release the bitmap, even when init or encoding throws; the original
        // only disposed on the success path and leaked the image on failure.
        bmp.Dispose();
    }
}