///////////////////////////////////////////////////////////////////////////////
// Construction and Initializing methods                                      //
///////////////////////////////////////////////////////////////////////////////
#region CONSTRUCTION

/// <summary>
/// Initializes a new instance of the DmoMixer class.
/// The parameters to the base class describe the number of input and output
/// streams, which DirectShow calls Pins, followed by the number of parameters
/// this class supports (can be zero), and the time format of those
/// parameters (should include ParamClass.TimeFormatFlags.Reference
/// if NumParameters > 0).
/// </summary>
public DmoMixer()
  : base(InputPinCount, OutputPinCount, NumParams, TimeFormatFlags.Reference)
{
  // Initialize the data members
  this.bufferFlags = 0;
  this.inputStreams = new VideoStream[InputPinCount];
  for (int i = 0; i < InputPinCount; i++)
  {
    this.inputStreams[i] = new VideoStream();
  }

  this.outputStream = new VideoStream();

  // Start describing the parameters this DMO supports. Building this
  // structure (painful as it is) will allow the base class to automatically
  // support IMediaParamInfo & IMediaParams, which allow clients to find
  // out what parameters you support, and to set them.
  // See the MSDN docs for MP_PARAMINFO for a description of the other parameters.
  ParamInfo backgroundColor = new ParamInfo();
  backgroundColor.mopCaps = MPCaps.Jump;
  backgroundColor.mpdMinValue.vInt = int.MinValue;
  backgroundColor.mpdMaxValue.vInt = int.MaxValue;
  backgroundColor.mpdNeutralValue.vInt = 0;
  backgroundColor.mpType = MPType.INT;
  backgroundColor.szLabel = "BackgroundColor";
  backgroundColor.szUnitText = "Color";
  ParamDefine(0, backgroundColor, "BackgroundColor\0Color\0");

  for (int i = 0; i < InputPinCount; i++)
  {
    ParamInfo streamLeft = new ParamInfo();
    streamLeft.mopCaps = MPCaps.Jump;
    streamLeft.mpdMinValue.vFloat = 0;
    streamLeft.mpdMaxValue.vFloat = 1;
    streamLeft.mpdNeutralValue.vFloat = 0;
    streamLeft.mpType = MPType.FLOAT;
    streamLeft.szLabel = "Stream" + i.ToString() + "Left";
    streamLeft.szUnitText = "Position";

    ParamInfo streamTop = new ParamInfo();
    streamTop.mopCaps = MPCaps.Jump;
    streamTop.mpdMinValue.vFloat = 0;
    streamTop.mpdMaxValue.vFloat = 1;
    streamTop.mpdNeutralValue.vFloat = 0;
    streamTop.mpType = MPType.FLOAT;
    streamTop.szLabel = "Stream" + i.ToString() + "Top";
    streamTop.szUnitText = "Position";

    ParamInfo streamWidth = new ParamInfo();
    streamWidth.mopCaps = MPCaps.Jump;
    streamWidth.mpdMinValue.vFloat = 0;
    streamWidth.mpdMaxValue.vFloat = 1;
    streamWidth.mpdNeutralValue.vFloat = 1;
    streamWidth.mpType = MPType.FLOAT;
    streamWidth.szLabel = "Stream" + i.ToString() + "Width";
    streamWidth.szUnitText = "Position";

    ParamInfo streamHeight = new ParamInfo();
    streamHeight.mopCaps = MPCaps.Jump;
    streamHeight.mpdMinValue.vFloat = 0;
    streamHeight.mpdMaxValue.vFloat = 1;
    streamHeight.mpdNeutralValue.vFloat = 1;
    streamHeight.mpType = MPType.FLOAT;
    streamHeight.szLabel = "Stream" + i.ToString() + "Height";
    streamHeight.szUnitText = "Position";

    ParamInfo streamAlpha = new ParamInfo();
    streamAlpha.mopCaps = MPCaps.Jump;
    streamAlpha.mpdMinValue.vFloat = 0;
    streamAlpha.mpdMaxValue.vFloat = 1;
    streamAlpha.mpdNeutralValue.vFloat = 1;
    streamAlpha.mpType = MPType.FLOAT;
    streamAlpha.szLabel = "Stream" + i.ToString() + "Alpha";
    streamAlpha.szUnitText = "Alpha";

    ParamDefine((i * 5) + 1, streamLeft, "Stream" + i.ToString() + "Left\0Position\0");
    ParamDefine((i * 5) + 2, streamTop, "Stream" + i.ToString() + "Top\0Position\0");
    ParamDefine((i * 5) + 3, streamWidth, "Stream" + i.ToString() + "Width\0Position\0");
    ParamDefine((i * 5) + 4, streamHeight, "Stream" + i.ToString() + "Height\0Position\0");
    ParamDefine((i * 5) + 5, streamAlpha, "Stream" + i.ToString() + "Alpha\0Alpha\0");
  }
}
/// <summary>
/// Resizes the incoming video stream to the new stream size using
/// bicubic interpolation.
/// </summary>
/// <param name="sourceStream">Source video stream.</param>
/// <param name="newStreamSize">New stream size.</param>
/// <returns>The resized and newly allocated <see cref="VideoStream"/>.</returns>
/// <remarks>The stream pointer has to be released after use with a call
/// to Marshal.FreeHGlobal(IntPtr).</remarks>
private unsafe VideoStream ResizeBicubic(VideoStream sourceStream, Size newStreamSize)
{
  // Get image sizes
  int width = Math.Abs(sourceStream.StreamWidth);
  int height = Math.Abs(sourceStream.StreamHeight);
  int newWidth = newStreamSize.Width;
  int newHeight = newStreamSize.Height;

  VideoStream resizedStream = new VideoStream();
  resizedStream.StreamBBP = sourceStream.StreamBBP;
  resizedStream.StreamHeight = newHeight;
  resizedStream.StreamWidth = newWidth;
  resizedStream.StreamStride = newWidth * sourceStream.StreamBBP;
  resizedStream.Position = sourceStream.Position;
  resizedStream.Alpha = sourceStream.Alpha;
  resizedStream.Buffer = null;
  resizedStream.BufferTimeLength = sourceStream.BufferTimeLength;
  resizedStream.BufferTimeStamp = sourceStream.BufferTimeStamp;

  // Allocate memory for the resized image
  resizedStream.BufferPointer = Marshal.AllocHGlobal(resizedStream.StreamStride * newHeight);

  int pixelSize = sourceStream.StreamBBP;
  int srcStride = sourceStream.StreamStride;
  int dstOffset = resizedStream.StreamStride - (pixelSize * newWidth);
  double factorX = (double)width / newWidth;
  double factorY = (double)height / newHeight;

  // Do the job
  byte* src = (byte*)sourceStream.BufferPointer;
  byte* dst = (byte*)resizedStream.BufferPointer;

  // Coordinates of source points and coefficients
  double ox, oy, dx, dy, k1, k2;
  int ox1, oy1, ox2, oy2;

  // Destination pixel values
  double r, g, b;

  // Width and height decreased by 1
  int ymax = height - 1;
  int xmax = width - 1;

  // Temporary pointer
  byte* p;

  // RGB
  for (int y = 0; y < newHeight; y++)
  {
    // Y coordinates
    oy = (double)((y * factorY) - 0.5f);
    oy1 = (int)oy;
    dy = oy - (double)oy1;

    for (int x = 0; x < newWidth; x++, dst += pixelSize)
    {
      // X coordinates
      ox = (double)((x * factorX) - 0.5f);
      ox1 = (int)ox;
      dx = ox - (double)ox1;

      // Initial pixel value
      r = g = b = 0;

      for (int n = -1; n < 3; n++)
      {
        // Get Y coefficient
        k1 = this.BiCubicKernel(dy - (double)n);

        oy2 = oy1 + n;
        if (oy2 < 0)
        {
          oy2 = 0;
        }

        if (oy2 > ymax)
        {
          oy2 = ymax;
        }

        for (int m = -1; m < 3; m++)
        {
          // Get X coefficient
          k2 = k1 * this.BiCubicKernel((double)m - dx);

          ox2 = ox1 + m;
          if (ox2 < 0)
          {
            ox2 = 0;
          }

          if (ox2 > xmax)
          {
            ox2 = xmax;
          }

          // Get pixel of original image
          p = src + (oy2 * srcStride) + (ox2 * pixelSize);

          r += k2 * p[RGBR];
          g += k2 * p[RGBG];
          b += k2 * p[RGBB];
        }
      }

      dst[RGBR] = (byte)r;
      dst[RGBG] = (byte)g;
      dst[RGBB] = (byte)b;
    }

    dst += dstOffset;
  }

  return resizedStream;
}
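// The BiCubicKernel helper called above is defined outside this excerpt.
// The sketch below is an ASSUMPTION about what it computes: the common
// cubic convolution kernel with a = -0.5 (Catmull-Rom), which matches the
// four-tap (-1..2) neighborhood sampled in the resize loop. The actual
// implementation may differ (e.g. a cubic B-spline kernel).
private double BiCubicKernel(double x)
{
  // The kernel is symmetric, so work with the absolute distance.
  if (x < 0)
  {
    x = -x;
  }

  double coefficient = 0;

  if (x <= 1)
  {
    // (a + 2)|x|^3 - (a + 3)|x|^2 + 1 with a = -0.5
    coefficient = (((1.5 * x) - 2.5) * x * x) + 1;
  }
  else if (x < 2)
  {
    // a|x|^3 - 5a|x|^2 + 8a|x| - 4a with a = -0.5
    coefficient = (((((-0.5 * x) + 2.5) * x) - 4) * x) + 2;
  }

  // Distances of 2 or more contribute nothing.
  return coefficient;
}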
/// <summary>
/// Core method to overlay the given video stream with its
/// properties on the output stream (given by the pointer).
/// </summary>
/// <param name="outputDataPointer">Pointer to the output data stream.</param>
/// <param name="videoStream">The <see cref="VideoStream"/> to overlay on the output.</param>
private unsafe void OverlayVideoStream(IntPtr outputDataPointer, VideoStream videoStream)
{
  RectangleF streamPosition = videoStream.Position;

  var streamWidth = (int)Math.Abs(streamPosition.Width * this.outputStream.StreamWidth);
  var streamHeight = (int)Math.Abs(streamPosition.Height * this.outputStream.StreamHeight);
  var resizeStream = streamWidth != Math.Abs(videoStream.StreamWidth) ||
                     streamHeight != Math.Abs(videoStream.StreamHeight);

  if (videoStream.FlipY)
  {
    var top = 1 - streamPosition.Height - streamPosition.Top;
    streamPosition.Location = new PointF(streamPosition.Left, top);
  }

  var streamLeft = (int)Math.Abs(streamPosition.Left * this.outputStream.StreamWidth);
  var streamTop = (int)Math.Abs(streamPosition.Top * this.outputStream.StreamHeight);

  var resizedStream = videoStream;
  if (resizeStream)
  {
    resizedStream = this.ResizeBicubic(videoStream, new Size(streamWidth, streamHeight));
  }

  byte* p;
  long r, g, b;

  // For each row in the overlay rectangle of this video stream
  for (int y = 0; y < Math.Abs(resizedStream.StreamHeight); y++)
  {
    // Calculate the read/write offsets of the video stream for this row
    int src = 0;
    if (videoStream.FlipY)
    {
      src = y * resizedStream.StreamStride;
    }
    else
    {
      src = (resizedStream.StreamHeight - y - 1) * resizedStream.StreamStride;
    }

    int dst = (streamTop + y) * this.outputStream.StreamStride;

    // Jump to the correct row (pointer arithmetic instead of
    // IntPtr.ToInt32() so the code stays correct in a 64-bit process)
    byte* sourcePointer = (byte*)resizedStream.BufferPointer + src;
    byte* destinationPointer = (byte*)outputDataPointer + dst;

    // Move to the correct x-position in the row
    // where the video overlay is placed
    destinationPointer += resizedStream.StreamBBP * streamLeft;

    // For each pixel in the row, starting at the correct position
    for (int x = 0; x < resizedStream.StreamWidth; x++)
    {
      // Get the output bytes
      p = destinationPointer;

      // The transparency of the overlay stream
      float transparency = resizedStream.Alpha;

      // Blend the overlay pixel into the output pixel using
      // the stream's transparency
      r = (long)((byte)(transparency * sourcePointer[RGBR]) + ((1 - transparency) * p[RGBR]));
      g = (long)((byte)(transparency * sourcePointer[RGBG]) + ((1 - transparency) * p[RGBG]));
      b = (long)((byte)(transparency * sourcePointer[RGBB]) + ((1 - transparency) * p[RGBB]));

      // Set the RGB values for the output
      destinationPointer[RGBR] = (byte)r;
      destinationPointer[RGBG] = (byte)g;
      destinationPointer[RGBB] = (byte)b;

      // Move source and output pointer by one pixel (bytes per pixel)
      sourcePointer += resizedStream.StreamBBP;
      destinationPointer += resizedStream.StreamBBP;
    }
  }

  if (resizeStream)
  {
    // Free resources of the resized stream
    Marshal.FreeHGlobal(resizedStream.BufferPointer);
  }
}
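// The inner loop above blends each color channel as
// output = alpha * overlay + (1 - alpha) * output.
// The helper below is NOT part of the original class; it is a minimal,
// hypothetical illustration of that per-channel blend for a single value.
private static byte BlendChannel(byte overlay, byte background, float alpha)
{
  // alpha = 1 shows only the overlay pixel, alpha = 0 only the background pixel.
  return (byte)((alpha * overlay) + ((1 - alpha) * background));
}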
/// <summary>
/// This method fills the output buffer with the video background and
/// calls the overlay method for each stream.
/// </summary>
/// <param name="outputDataPointer">Pointer to the output data stream.</param>
/// <param name="outputByteCount">Number of bytes in the output stream.</param>
/// <param name="videoStreams">The array of <see cref="VideoStream"/>s of the input pins.</param>
/// <param name="backgroundColor">An AARRGGBB int with the background color.</param>
private unsafe void DoOverlay(IntPtr outputDataPointer, int outputByteCount, VideoStream[] videoStreams, int backgroundColor)
{
  // Fill the whole output area with the background color
  Color background = Color.FromArgb(backgroundColor);
  byte* outputPointer = (byte*)outputDataPointer; // avoid IntPtr.ToInt32() truncation on 64-bit

  for (int i = 0; i < Math.Abs(this.outputStream.StreamHeight * this.outputStream.StreamWidth); i++)
  {
    if (this.outputStream.StreamBBP == 4)
    {
      outputPointer[RGBA] = background.A;
      outputPointer[RGBR] = background.R;
      outputPointer[RGBG] = background.G;
      outputPointer[RGBB] = background.B;
      outputPointer += 4;
    }
    else
    {
      outputPointer[RGBR] = background.R;
      outputPointer[RGBG] = background.G;
      outputPointer[RGBB] = background.B;
      outputPointer += 3;
    }
  }

  // Overlay each incoming video stream
  for (int j = 0; j < this.inputStreams.Length; j++)
  {
    this.OverlayVideoStream(outputDataPointer, videoStreams[j]);
  }
}
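// The backgroundColor argument of DoOverlay is a packed AARRGGBB integer,
// matching what Color.FromArgb(int) expects above. The helper below is NOT
// part of the original class; it is a hypothetical sketch of how a caller
// could build that value from a System.Drawing.Color.
private static int PackBackgroundColor(Color color)
{
  // Color.ToArgb() returns the 32-bit AARRGGBB representation,
  // e.g. opaque black is unchecked((int)0xFF000000).
  return color.ToArgb();
}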