public void Dispose()
{
    _isDisposing = true;

    if (_vpxEncodeCtx != null)
    {
        vpx_codec.VpxCodecDestroy(_vpxEncodeCtx);
    }

    if (_vpxEncodeImg != null)
    {
        VpxImage.VpxImgFree(_vpxEncodeImg);
    }

    if (_vpxDecodeCtx != null)
    {
        vpx_codec.VpxCodecDestroy(_vpxDecodeCtx);
    }
}
// https://swift.im/git/swift-contrib/tree/Swiften/ScreenSharing/VP8Decoder.cpp?id=6247ed394302ff2cf1f33a71df808bebf7241242
public List<byte[]> Decode(byte[] buffer, int bufferSize, out uint width, out uint height)
{
    List<byte[]> decodedBuffers = new List<byte[]>();
    width = 0;
    height = 0;

    if (!_isDisposing)
    {
        unsafe
        {
            fixed (byte* pBuffer = buffer)
            {
                var decodeRes = vpx_decoder.VpxCodecDecode(_vpxDecodeCtx, pBuffer, (uint)bufferSize, IntPtr.Zero, 1);

                if (decodeRes != VpxCodecErrT.VPX_CODEC_OK)
                {
                    // The reason not to throw an exception here is that a partial frame can easily be passed to the decoder.
                    // This will result in a decode failure but should not affect the decode of the next full frame.
                    //throw new ApplicationException($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                    logger.LogWarning($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                }
                else
                {
                    IntPtr iter = IntPtr.Zero;
                    VpxImage img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void**)&iter);

                    while (img != null)
                    {
                        // Convert the VPX image buffer to an I420 buffer WITHOUT the stride.
                        width = img.DW;
                        height = img.DH;

                        int ySize = (int)(width * height);
                        int uvSize = (int)(((width + 1) / 2) * ((height + 1) / 2) * 2);
                        int uvWidth = (int)(width + 1) / 2;

                        var yPlane = (byte*)img.PlaneY;
                        var uPlane = (byte*)img.PlaneU;
                        var vPlane = (byte*)img.PlaneV;

                        byte[] decodedBuffer = new byte[ySize + uvSize];

                        for (int row = 0; row < height; row++)
                        {
                            Marshal.Copy((IntPtr)(yPlane + row * img.Stride[0]), decodedBuffer, (int)(row * width), (int)width);

                            if (row < height / 2)
                            {
                                Marshal.Copy((IntPtr)(uPlane + row * img.Stride[1]), decodedBuffer, ySize + row * uvWidth, uvWidth);
                                Marshal.Copy((IntPtr)(vPlane + row * img.Stride[2]), decodedBuffer, ySize + uvSize / 2 + row * uvWidth, uvWidth);
                            }
                        }

                        decodedBuffers.Add(decodedBuffer);

                        VpxImage.VpxImgFree(img);
                        img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void**)&iter);
                    }
                }
            }
        }
    }

    return decodedBuffers;
}
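A small, hedged sketch of where each plane lands in the packed I420 buffer that the method above builds. The helper name GetI420PlaneOffsets and the example dimensions are illustrative and not part of the source; the arithmetic simply mirrors the ySize / uvSize / uvWidth calculations above, including the round-up for odd dimensions.

// Hypothetical helper (not in the source): plane offsets for the stride-free I420 buffer returned by Decode.
public static (int yOffset, int uOffset, int vOffset, int totalSize) GetI420PlaneOffsets(uint width, uint height)
{
    int ySize = (int)(width * height);
    int uvWidth = (int)(width + 1) / 2;     // Chroma dimensions round up for odd luma dimensions.
    int uvHeight = (int)(height + 1) / 2;
    int uvPlaneSize = uvWidth * uvHeight;

    // Layout is Y plane first, then U, then V; total size is ySize + 2 * uvPlaneSize.
    return (0, ySize, ySize + uvPlaneSize, ySize + 2 * uvPlaneSize);
}

For example, a 101 x 59 frame gives a 5959-byte Y plane and two 51 x 30 = 1530-byte chroma planes, so the helper returns (0, 5959, 7489, 9019).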
// https://swift.im/git/swift-contrib/tree/Swiften/ScreenSharing/VP8Decoder.cpp?id=6247ed394302ff2cf1f33a71df808bebf7241242
public List<byte[]> Decode(byte[] buffer, int bufferSize, out uint width, out uint height)
{
    List<byte[]> decodedBuffers = new List<byte[]>();
    width = 0;
    height = 0;

    if (!_isDisposing)
    {
        unsafe
        {
            fixed (byte* pBuffer = buffer)
            {
                var decodeRes = vpx_decoder.VpxCodecDecode(_vpxDecodeCtx, pBuffer, (uint)bufferSize, IntPtr.Zero, 1);

                if (decodeRes != VpxCodecErrT.VPX_CODEC_OK)
                {
                    // The reason not to throw an exception here is that a partial frame can easily be passed to the decoder.
                    // This will result in a decode failure but should not affect the decode of the next full frame.
                    //throw new ApplicationException($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                    logger.LogWarning($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                }
                else
                {
                    IntPtr iter = IntPtr.Zero;
                    VpxImage img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void**)&iter);

                    while (img != null)
                    {
                        // Convert the VPX image buffer to an I420 buffer WITHOUT the stride.
                        width = img.DW;
                        height = img.DH;
                        int sz = (int)(width * height);

                        var yPlane = (byte*)img.PlaneY;
                        var uPlane = (byte*)img.PlaneU;
                        var vPlane = (byte*)img.PlaneV;

                        byte[] decodedBuffer = new byte[width * height * 3 / 2];

                        for (uint row = 0; row < height; row++)
                        {
                            Marshal.Copy((IntPtr)(yPlane + row * img.Stride[0]), decodedBuffer, (int)(row * width), (int)width);

                            if (row < height / 2)
                            {
                                Marshal.Copy((IntPtr)(uPlane + row * img.Stride[1]), decodedBuffer, (int)(sz + row * (width / 2)), (int)width / 2);
                                Marshal.Copy((IntPtr)(vPlane + row * img.Stride[2]), decodedBuffer, (int)(sz + sz / 4 + row * (width / 2)), (int)width / 2);
                            }
                        }

                        // This block converts the VPX image buffer directly to RGB24 but it's way too slow.
                        // Was taking 60 to 90ms on Win10 i7 CPU.
                        //byte[] data = new byte[width * height * 3];
                        //int i = 0;
                        //for (uint imgY = 0; imgY < height; imgY++)
                        //{
                        //    for (uint imgX = 0; imgX < width; imgX++)
                        //    {
                        //        int y = yPlane[imgY * img.Stride[0] + imgX];
                        //        int u = uPlane[(imgY / 2) * img.Stride[1] + (imgX / 2)];
                        //        int v = vPlane[(imgY / 2) * img.Stride[2] + (imgX / 2)];
                        //        int c = y - 16;
                        //        int d = (u - 128);
                        //        int e = (v - 128);
                        //        // TODO: adjust colors ?
                        //        int r = (298 * c + 409 * e + 128) >> 8;
                        //        int g = (298 * c - 100 * d - 208 * e + 128) >> 8;
                        //        int b = (298 * c + 516 * d + 128) >> 8;
                        //        r = r < 0 ? 0 : r > 255 ? 255 : r;
                        //        g = g < 0 ? 0 : g > 255 ? 255 : g;
                        //        b = b < 0 ? 0 : b > 255 ? 255 : b;
                        //        // TODO: cast instead of clamp8
                        //        data[i + 0] = (byte)(b);
                        //        data[i + 1] = (byte)(g);
                        //        data[i + 2] = (byte)(r);
                        //        i += 3;
                        //    }
                        //}
                        //decodedBuffers.Add(data);

                        decodedBuffers.Add(decodedBuffer);

                        VpxImage.VpxImgFree(img);
                        img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void**)&iter);
                    }
                }
            }
        }
    }

    return decodedBuffers;
}
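As a hedged, standalone sketch (not part of the source), the same integer YUV-to-RGB24 arithmetic from the commented-out block above can be applied to the packed I420 buffer that Decode returns. The helper name I420ToRgb24 is hypothetical; it assumes even frame dimensions and the sz / sz + sz/4 plane offsets used above, and it has the same per-pixel cost that the original comment flags as too slow for real-time use.

// Hypothetical helper (not in the source): converts the packed, stride-free I420 buffer from Decode
// to RGB24 with bytes written in B, G, R order, exactly as in the commented-out block above.
// Assumes width and height are even.
public static byte[] I420ToRgb24(byte[] i420, int width, int height)
{
    int ySize = width * height;
    int uOffset = ySize;                // U plane follows the Y plane.
    int vOffset = ySize + ySize / 4;    // V plane follows the U plane.
    byte[] rgb = new byte[width * height * 3];
    int i = 0;

    for (int imgY = 0; imgY < height; imgY++)
    {
        for (int imgX = 0; imgX < width; imgX++)
        {
            int y = i420[imgY * width + imgX];
            int u = i420[uOffset + (imgY / 2) * (width / 2) + (imgX / 2)];
            int v = i420[vOffset + (imgY / 2) * (width / 2) + (imgX / 2)];

            int c = y - 16;
            int d = u - 128;
            int e = v - 128;

            int r = (298 * c + 409 * e + 128) >> 8;
            int g = (298 * c - 100 * d - 208 * e + 128) >> 8;
            int b = (298 * c + 516 * d + 128) >> 8;

            rgb[i + 0] = (byte)(b < 0 ? 0 : b > 255 ? 255 : b);
            rgb[i + 1] = (byte)(g < 0 ? 0 : g > 255 ? 255 : g);
            rgb[i + 2] = (byte)(r < 0 ? 0 : r > 255 ? 255 : r);
            i += 3;
        }
    }

    return rgb;
}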