/// <summary>
/// Called by the video player whenever the next video frame is available.
/// Inverts the frame's colors in place, previews it, and records it.
/// </summary>
/// <param name="sender">The video player that raised the event.</param>
/// <param name="args">Event data; <c>args.Frame</c> is the frame to process.</param>
private void videoPlayer1_NextFrame(OpenCVDotNet.UI.VideoPlayer sender, OpenCVDotNet.UI.NextFrameEventArgs args)
{
    CVImage frame = args.Frame;

    // Walk every pixel and replace it with its color negative (255 - channel).
    for (int row = 0; row < frame.Height; ++row)
    {
        for (int col = 0; col < frame.Width; ++col)
        {
            CVRgbPixel source = frame[row, col];
            byte r = (byte)(255 - source.R);
            byte g = (byte)(255 - source.G);
            byte b = (byte)(255 - source.B);
            frame[row, col] = new CVRgbPixel(r, g, b);
        }
    }

    // Preview the inverted frame and append it to the output video.
    pictureBox1.Image = frame.ToBitmap();
    writer.WriteFrame(frame);
}
/// <summary>
/// Converts this image to a GDI+ <see cref="System.Drawing.Bitmap"/> by copying
/// the pixel data row by row. Only 8-bit unsigned images with 1, 3 or 4
/// channels are supported.
/// </summary>
/// <returns>A new <see cref="Bitmap"/> holding a copy of the image data.</returns>
/// <exception cref="NotImplementedException">
/// Thrown when the depth is not <see cref="CVDepth.Depth8U"/> or the channel
/// count is not 1, 3 or 4.
/// </exception>
public System.Drawing.Bitmap ToBitmap()
{
    // Map (depth, channel count) to a GDI+ pixel format.
    PixelFormat pixelFormat;
    if (this.Depth == CVDepth.Depth8U)
    {
        switch (this.Channels)
        {
            case 1: pixelFormat = PixelFormat.Format8bppIndexed; break;
            case 3: pixelFormat = PixelFormat.Format24bppRgb; break;
            case 4: pixelFormat = PixelFormat.Format32bppArgb; break;
            default: throw new NotImplementedException("Format is not supported.");
        }
    }
    else
    {
        throw new NotImplementedException("Format is not supported.");
    }

    Bitmap result = new Bitmap(this.Width, this.Height, pixelFormat);

    // An 8bpp indexed bitmap needs an explicit grayscale palette, otherwise
    // the stored indices are rendered through an arbitrary default palette.
    // Bitmap.Palette returns a copy, so modify the copy and assign it back.
    if (pixelFormat == PixelFormat.Format8bppIndexed)
    {
        ColorPalette palette = result.Palette;
        for (int i = 0; i < 256; ++i)
        {
            palette.Entries[i] = Color.FromArgb(i, i, i);
        }
        result.Palette = palette;
    }

    BitmapData resultData = result.LockBits(
        new Rectangle(Point.Empty, new Size(this.Width, this.Height)),
        ImageLockMode.WriteOnly, pixelFormat);
    try
    {
        unsafe
        {
            byte* pWrite = (byte*)resultData.Scan0;
            int cols = this.Width;
            int rows = this.Height;
            int channels = this.Channels;
            // Stride may include per-row padding; skip it at the end of each row.
            int rowPadding = resultData.Stride - cols * channels;

            for (int row = 0; row < rows; ++row, pWrite += rowPadding)
            {
                for (int col = 0; col < cols; ++col, pWrite += channels)
                {
                    // TODO: Improve performance — per-pixel indexer calls are slow.
                    CVRgbPixel c = this[row, col];
                    if (channels == 1)
                    {
                        // Grayscale: exactly one byte per pixel. The previous
                        // version wrote three bytes here while only advancing
                        // by one, corrupting 8bpp output.
                        // NOTE(review): assumes the indexer replicates the gray
                        // value across R/G/B so any channel works — confirm.
                        pWrite[0] = c.B;
                    }
                    else
                    {
                        // GDI+ stores pixels in BGR(A) byte order.
                        pWrite[0] = c.B;
                        pWrite[1] = c.G;
                        pWrite[2] = c.R;
                        if (channels == 4)
                        {
                            pWrite[3] = 255; // fully opaque alpha
                        }
                    }
                }
            }
        }
    }
    finally
    {
        // Always unlock, even if the copy loop throws.
        result.UnlockBits(resultData);
    }

    return result;
}
/// <summary>
/// Updates the running-average background model (<c>bgAccum</c>) from the
/// video player's last frame and renders a black/white foreground mask into
/// <c>bgFrame</c>, which is then displayed.
/// </summary>
private void ProcessBackgroundSubtraction()
{
    // Last frame captured by the video player.
    CVImage frame = videoPlayer.LastFrame;

    for (int row = 0; row < frame.Height; ++row)
    {
        for (int col = 0; col < frame.Width; ++col)
        {
            CVRgbPixel pixel = frame[row, col];

            // Normalized grayscale intensity in [0, 1].
            double val = (double)pixel.BwValue / 255.0;

            // Exponential moving average of the background; -1 marks a cell
            // that has not been initialized yet.
            if (bgAccum[row, col] == -1)
            {
                bgAccum[row, col] = val;
            }
            else
            {
                bgAccum[row, col] = alpha * val + (1 - alpha) * bgAccum[row, col];
            }

            // Difference between this pixel and the background model so far.
            double delta = val - bgAccum[row, col];

            // Fade previously-detected foreground toward black. Compute in
            // int and clamp at zero: the original byte subtraction wrapped
            // around (e.g. 1 - STEP_SIZE => ~252), leaving bright artifacts.
            int faded = bgFrame[row, col].BwValue;
            if (faded > 0)
            {
                faded = Math.Max(0, faded - STEP_SIZE);
            }
            byte currentBgValue = (byte)faded;

            // NOTE(review): delta is signed, so pixels darker than the model
            // are always treated as background — confirm Math.Abs(delta) is
            // not intended here.
            if (delta >= threshold)
            {
                // Foreground pixel: mark it white.
                currentBgValue = 255;
            }

            bgFrame[row, col] = new CVRgbPixel(currentBgValue, currentBgValue, currentBgValue);
        }
    }

    // Display the updated mask on the window.
    bs.Image = bgFrame.ToBitmap();
}