// Calculate dense optical flow between two frames using the Farneback algorithm.
// Every moving pixel is converted into a feature vector (velocity components,
// angle, magnitude, line segment) collected in this.all_lines, then the frame
// is partitioned into 9 sections via CalculateSections.
// NOTE(review): this method disposes prevFrame — callers must not reuse it after the call; confirm intended.
public Image<Hsv, byte> CalculateOpticalFlow(Image<Gray, byte> prevFrame, Image<Gray, byte> nextFrame, int frameNumber = 0)
{
    Image<Hsv, byte> coloredMotion = new Image<Hsv, byte>(nextFrame.Width, nextFrame.Height);
    Image<Gray, float> velx = new Image<Gray, float>(new Size(prevFrame.Width, prevFrame.Height));
    Image<Gray, float> vely = new Image<Gray, float>(new Size(prevFrame.Width, prevFrame.Height));

    // pyrScale 0.5, 3 pyramid levels, window 60, 3 iterations, polyN 5, polySigma 1.1.
    CvInvoke.CalcOpticalFlowFarneback(prevFrame, nextFrame, velx, vely, 0.5, 3, 60, 3, 5, 1.1, OpticalflowFarnebackFlag.Default);
    prevFrame.Dispose();

    for (int i = 0; i < coloredMotion.Width; i++)
    {
        for (int j = 0; j < coloredMotion.Height; j++)
        {
            // Flow components at this pixel (row j, column i).
            double velxHere = velx[j, i].Intensity;
            double velyHere = vely[j, i].Intensity;

            // Skip pixels with no motion at all.
            if (velxHere == 0 && velyHere == 0)
            {
                continue;
            }

            // Map the flow direction to a hue-like angle in [0, 180).
            // BUGFIX: the original Math.Atan(velyHere / velxHere) produced NaN when
            // velxHere == 0; Atan2 is quadrant-correct, handles that case, and
            // preserves the original mapping for velxHere != 0.
            double degrees = Math.Atan2(velyHere, velxHere) * 90.0 / Math.PI + 45.0;
            if (degrees < 0)
            {
                degrees += 180.0;
            }

            // Motion magnitude, scaled by 10 for visibility.
            double intensity = Math.Sqrt(velxHere * velxHere + velyHere * velyHere) * 10;

            Point p1 = new Point(i, j);
            Point p2 = ComputerSecondPoint(p1, degrees, intensity);
            if (p1.X == p2.X && p1.Y == p2.Y)
            {
                continue; // degenerate segment — nothing to record
            }
            // BUGFIX: removed an empty "if (intensity < 2) { }" block that had no effect.

            this.all_lines.Add(new FeatureVectorOpticalFlow(
                Math.Round(velxHere, 2),
                Math.Round(velyHere, 2),
                Math.Round(degrees, 2),
                Math.Round(intensity, 2),
                new LineSegment2D(p1, p2)));
        }
    }

    // BUGFIX: the flow buffers were previously leaked.
    velx.Dispose();
    vely.Dispose();

    // Partition the frame into 9 sections and assign each line to its section.
    coloredMotion = CalculateSections(coloredMotion, frameNumber);
    return coloredMotion;
}
// Compute dense Farneback optical flow between the last and current frames,
// storing the components in the velx/vely fields, then draw the resulting
// flow map onto the input image.
void ComputeDenseOpticalFlow()
{
    velx = new Image<Gray, float>(_lastImage.Size);
    vely = new Image<Gray, float>(_currentImage.Size);
    // BUGFIX: removed an unused local ("result" = _currentImage.CopyBlank())
    // that allocated a full-size frame on every call and was never read or disposed.
    CvInvoke.CalcOpticalFlowFarneback(_lastImage, _currentImage, velx, vely, 0.5, 3, 15, 3, 6, 1.3, 0);
    DrawFarnerBackFlowMap(_inputImage, velx, vely, 1);
}
// Compute dense flow between lastGray/currentGray, derive the mean flow
// magnitude and angle over the moving pixels (returned through vmag/vang),
// and render an HSV flow visualization plus a mean-direction line into
// currentImage. Also tracks the extreme mean angles in minang/maxang.
private void VisualizeDenseFlow_Means(ref double vmag, ref double vang)
{
    Image<Gray, float> flowX = new Image<Gray, float>(currentGray.Size);
    Image<Gray, float> flowY = new Image<Gray, float>(currentGray.Size);
    Image<Gray, float> mag = new Image<Gray, float>(currentGray.Size);
    Image<Gray, float> ang = new Image<Gray, float>(currentGray.Size);

    // Constant-255 plane used as the saturation channel of the HSV visualization.
    Image<Gray, byte> fullGray = new Image<Gray, byte>(currentImage.Size);
    fullGray.SetValue(new Gray(255));

    // Flow needs two frames; skip until the second one has been captured.
    if (framesProcessed >= 2)
    {
        CvInvoke.CalcOpticalFlowFarneback(lastGray, currentGray, flowX, flowY, 0.5, 3, 50, 3, 5, 1.2, OpticalflowFarnebackFlag.Default);
        CvInvoke.CartToPolar(flowX, flowY, mag, ang, true); // angles in degrees

        // Zero out magnitudes <= 4 so the means only consider real motion.
        CvInvoke.Threshold(mag, mag, 4, float.MaxValue, ThresholdType.ToZero);

        // Mean angle/magnitude over pixels where the thresholded magnitude is non-zero.
        MCvScalar meanang = CvInvoke.Mean(ang.Mat, mag.Convert<Gray, byte>());
        MCvScalar meanmag = CvInvoke.Mean(mag.Mat, mag.Convert<Gray, byte>());
        vmag = meanmag.V0;
        vang = meanang.V0;

        // Track the extreme mean angles seen while motion is significant.
        if (vmag > 4)
        {
            if (minang > vang) { minang = vang; }
            if (maxang < vang) { maxang = vang; }
        }

        // Scale to byte range for the HSV image.
        CvInvoke.Normalize(mag, mag, 0, 255, NormType.MinMax);
        CvInvoke.Normalize(ang, ang, 0, 255, NormType.MinMax);
        Image<Hsv, byte> hsv = new Image<Hsv, byte>(new Image<Gray, byte>[] { ang.Convert<Gray, byte>(), fullGray, mag.Convert<Gray, byte>() });

        // Unit vector of the mean flow direction.
        double x = Math.Cos(vang * (Math.PI / 180.0));
        double y = Math.Sin(vang * (Math.PI / 180.0));

        CvInvoke.CvtColor(hsv, currentImage, ColorConversion.Hsv2Bgr);
        // BUGFIX: Emgu's Erode/Dilate return new images; the original discarded
        // both results, so no morphology was actually applied.
        currentImage = currentImage.Erode(5).Dilate(5);
        currentImage.Draw(
            new LineSegment2D(
                new Point(currentImage.Size.Width / 2, currentImage.Height / 2),
                new Point((int)(x * 100) + currentImage.Size.Width / 2, (int)(y * 100) + currentImage.Height / 2)),
            new Bgr(Color.Red), 2);
    }
}
// Estimate an average distance from stereo edge images: dense Farneback flow
// between the left and right edge images supplies a per-pixel disparity, and
// each disparity inside the ROI is converted to depth via the pinhole model
// Z = baseline * focalLength / disparity. Returns -1 when the ROI is empty
// or no pixel has a measurable disparity.
private static double CountDistanceThroughEdges(Mat leftEdges, Mat rightEdges, Rectangle roi, double baseLine, double focalLengthPx)
{
    // BUGFIX: Rectangle is a value type, so the original "roi == null" was
    // always false; an unset ROI is detected via IsEmpty instead.
    if (roi.IsEmpty)
    {
        return -1;
    }

    Image<Gray, byte> imageLeft = leftEdges.ToImage<Gray, byte>();
    Image<Gray, byte> imageRight = rightEdges.ToImage<Gray, byte>();
    Image<Gray, float> flowX = new Image<Gray, float>(leftEdges.Width, leftEdges.Height);
    Image<Gray, float> flowY = new Image<Gray, float>(leftEdges.Width, leftEdges.Height);
    CvInvoke.CalcOpticalFlowFarneback(imageLeft, imageRight, flowX, flowY, 0.5, 3, 25, 10, 5, 1.1, OpticalflowFarnebackFlag.FarnebackGaussian);

    Mat magnitude = new Mat();
    Mat angle = new Mat();
    CvInvoke.CartToPolar(flowX.Mat, flowY.Mat, magnitude, angle);

    int matWidth = magnitude.Width;
    int matHeight = magnitude.Height;
    float[] magData = new float[matWidth * matHeight];
    magnitude.CopyTo(magData);

    // BUGFIX: the original loops used "<=", reading one row and one column past
    // the ROI; bounds are also clamped to the matrix so an ROI touching the
    // image edge cannot index out of the data array.
    int xEnd = Math.Min(roi.X + roi.Width, matWidth);
    int yEnd = Math.Min(roi.Y + roi.Height, matHeight);
    List<double> results = new List<double>();
    for (int x = roi.X; x < xEnd; x++)
    {
        for (int y = roi.Y; y < yEnd; y++)
        {
            float delta = GetElementFromArrayAsFromMatrix(magData, y, x, matWidth);
            if (delta < epsilon)
            {
                continue; // no measurable disparity at this pixel
            }
            double distance = (baseLine * focalLengthPx) / delta;
            results.Add(distance);
        }
    }
    return results.Count == 0 ? -1 : results.Average();
}
// Button handler: opens a video, computes dense Farneback flow frame-to-frame,
// accumulates squared x-flow into the y-flow buffer, runs Canny edge detection
// on the truncated result, and writes the edge frames to "result.avi".
// NOTE(review): "keys" is declared but never used, and the VideoFileWriter is
// never closed/disposed — the output file may be left unfinalized. Verify.
private void button11_Click(object sender, EventArgs e)
{
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        // Per-video state captured by the frame callback below; initialized
        // lazily on the first frame.
        Image<Gray, Byte> last = null;
        Image<Gray, float> flowX = null;
        Image<Gray, float> flowY = null;
        Image<Gray, byte> draw = null;
        MKeyPoint[] keys;
        Accord.Video.FFMPEG.VideoFileWriter wr = new Accord.Video.FFMPEG.VideoFileWriter();
        VideoProcessig videoProcessig = new VideoProcessig(openFileDialog1.FileName, (map) =>
        {
            Image<Gray, Byte> image = new Image<Gray, byte>(map);
            if (last == null)
            {
                // First frame: allocate buffers, open the writer, echo the frame.
                last = image;
                flowX = new Image<Gray, float>(map.Size);
                flowY = new Image<Gray, float>(map.Size);
                draw = new Image<Gray, byte>(map.Size);
                wr.Open("result.avi", map.Width, map.Height);
                return (map);
            }
            else
            {
                CvInvoke.CalcOpticalFlowFarneback(last, image, flowX, flowY, 0.5, 3, 10, 3, 5, 1.5, Emgu.CV.CvEnum.OpticalflowFarnebackFlag.Default);
                last = image;
            }
            // NOTE(review): AccumulateSquare adds flowX^2 into flowY, mixing the
            // two flow components — confirm this is intended rather than a
            // separate accumulator image.
            CvInvoke.AccumulateSquare(flowX, flowY);
            // Edge-detect the byte-truncated accumulated flow.
            CvInvoke.Canny(flowY.Convert<byte>(FloatToByte), draw, 40, 50);
            var result = draw.Bitmap;
            wr.WriteVideoFrame(result);
            return (result);
        });
        videoProcessig.Show();
    }

    // Truncating float-to-byte conversion used by Convert above.
    byte FloatToByte(float val)
    {
        return ((byte)val);
    }
}
// Button handler: runs K-means region segmentation (Morf, k = 5) over the mean
// video frame, then per frame builds a region-subtracted grayscale image,
// computes dense Farneback flow against the previous such image, and renders
// the normalized |flowX| + |flowY| motion energy back to the video window.
private void button12_Click(object sender, EventArgs e)
{
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        // Per-video state captured by the frame callback below.
        Image<Gray, Byte> last = null;   // previous processed frame
        Image<Gray, float> flowX = null; // flow x-components
        Image<Gray, float> flowY = null; // flow y-components
        Image<Gray, float> draw = null;  // motion-energy output image

        // Mean frame of the whole video and its K-means segmentation.
        Vector3[,] val = Procedurs.Medium(openFileDialog1.FileName);
        Morf morf = Morf.GenerateKMean(val, 5);
        morf.RemoveEmptyRegions();

        // Average color of each surviving region.
        List<Vector3> avrs = new List<Vector3>();
        for (int i = 0; i < morf.regions.Count; i++)
        {
            avrs.Add(morf.regions[i].GetAverage(val));
        }

        float[,] temp = new float[val.GetLength(0), val.GetLength(1)];
        VideoProcessig videoProcessig = new VideoProcessig(openFileDialog1.FileName, (map) =>
        {
            // Subtract the per-region average from the current frame into temp.
            val.WriteRGB(map);
            Procedurs.MorfSubtract(morf, avrs, val, temp);
            Image<Gray, byte> image = new Image<Gray, byte>(map.Size);
            temp.RegMaximum();
            // NOTE(review): temp is indexed [x, y] here while images use [y, x];
            // confirm temp's axis order matches the image.
            for (int x = 0; x < map.Width; x++)
            {
                for (int y = 0; y < map.Height; y++)
                {
                    Gray g = image[y, x];
                    g.Intensity = temp[x, y] * 255;
                    image[y, x] = g;
                }
            }

            // First frame: only initialize buffers; afterwards compute flow.
            if (last == null)
            {
                last = image;
                flowX = new Image<Gray, float>(map.Size);
                flowY = new Image<Gray, float>(map.Size);
                draw = new Image<Gray, float>(map.Size);
            }
            else
            {
                CvInvoke.CalcOpticalFlowFarneback(last, image, flowX, flowY, 0.5, 3, 10, 3, 5, 1.5, Emgu.CV.CvEnum.OpticalflowFarnebackFlag.Default);
                last = image;
            }

            // Motion energy |fx| + |fy| per pixel, tracking the global maximum.
            double max = 0;
            for (int y = 0; y < map.Height; y++)
            {
                for (int x = 0; x < map.Width; x++)
                {
                    Gray g = draw[y, x];
                    double value = Math.Abs(flowX[y, x].Intensity) + Math.Abs(flowY[y, x].Intensity);
                    max = Math.Max(value, max);
                    g.Intensity = value;
                    draw[y, x] = g;
                }
            }
            // Normalize energies to [0, 255].
            // NOTE(review): when max stays 0 (no motion at all) this divides by
            // zero, producing NaN intensities — verify acceptable.
            for (int y = 0; y < map.Height; y++)
            {
                for (int x = 0; x < map.Width; x++)
                {
                    Gray g = draw[y, x];
                    g.Intensity /= max;
                    g.Intensity *= 255;
                    draw[y, x] = g;
                }
            }
            var result = draw.Bitmap;
            return (result);
        });
        videoProcessig.Show();
    }
}
// Timer tick: grabs a frame, computes dense optical flow against the previous
// grayscale frame, derives the average motion vector (instantaneous and
// history-smoothed), visualizes the flow as HSV, and updates the picture box
// and status bar.
private void UpdateTimer_Tick(object sender, EventArgs e)
{
    // BUGFIX: removed a dead "if (false && framesProcessed == 2)" early-return
    // branch — the short-circuiting "false &&" meant it could never execute.
    double rho = 0.0;       // magnitude of the average flow vector
    double theta_deg = 0.0; // direction of the average flow vector, degrees
    perfSw.Restart();
    try
    {
        framesProcessed++;

        // Shift current gray to last, then capture and prepare the new frame.
        lastGray = currentGray;
        currentImage = capture.QueryFrame().ToImage<Bgr, Byte>();
        Image<Gray, byte> grayImage = new Image<Gray, byte>(currentImage.Size);
        CvInvoke.CvtColor(currentImage, grayImage, ColorConversion.Bgr2Gray);
        // Gaussian blur smooths sensor noise before flow estimation.
        CvInvoke.GaussianBlur(grayImage, grayImage, new Size(15, 15), 1.8);
        currentGray = grayImage;

        // Buffers for flow in Cartesian (flowX/flowY) and polar (mag/ang) form.
        Image<Gray, float> flowX = new Image<Gray, float>(currentGray.Size);
        Image<Gray, float> flowY = new Image<Gray, float>(currentGray.Size);
        Image<Gray, float> mag = new Image<Gray, float>(currentGray.Size);
        Image<Gray, float> ang = new Image<Gray, float>(currentGray.Size);

        // Constant-255 plane used as the HSV saturation channel.
        Image<Gray, byte> fullGray = new Image<Gray, byte>(currentImage.Size);
        fullGray.SetValue(new Gray(255));

        // Flow needs two frames; skip the very first tick.
        if (framesProcessed >= 2)
        {
            int threshold = 2;
            CvInvoke.CalcOpticalFlowFarneback(lastGray, currentGray, flowX, flowY, 0.5, 3, 20, 3, 5, 1.2, OpticalflowFarnebackFlag.Default);
            // Convert x/y flow to magnitude and angle (degrees).
            CvInvoke.CartToPolar(flowX, flowY, mag, ang, true);

            // Binarize magnitude: 1 where motion exceeds the threshold, else 0.
            CvInvoke.Threshold(mag, mag, threshold, 1.0, ThresholdType.Binary);
            // Sum of the 0/1 mask = number of moving pixels.
            MCvScalar sumMask = CvInvoke.Sum(mag);

            // Keep only the flow vectors of moving pixels.
            flowX = flowX.Copy(mag.Convert<Gray, byte>());
            flowY = flowY.Copy(mag.Convert<Gray, byte>());
            MCvScalar sumX = CvInvoke.Sum(flowX);
            MCvScalar sumY = CvInvoke.Sum(flowY);

            // Average flow components over the moving pixels.
            double avgX = 0.0;
            double avgY = 0.0;
            if (sumMask.V0 > 0.0)
            {
                avgX = sumX.V0 / sumMask.V0;
                avgY = sumY.V0 / sumMask.V0;
            }

            // Average flow in polar form (rho magnitude, theta direction).
            rho = Math.Sqrt(avgX * avgX + avgY * avgY);
            double theta = Math.Atan2(avgY, avgX);
            theta_deg = theta * 180 / Math.PI;

            // Scale to byte range for the HSV visualization.
            // NOTE(review): mag was overwritten by the binary threshold above,
            // so this normalizes the 0/1 mask rather than raw magnitudes —
            // confirm intended.
            CvInvoke.Normalize(mag, mag, 0, 255, NormType.MinMax);
            CvInvoke.Normalize(ang, ang, 0, 255, NormType.MinMax);
            Image<Hsv, byte> hsv = new Image<Hsv, byte>(new Image<Gray, byte>[] { ang.Convert<Gray, byte>(), fullGray, mag.Convert<Gray, byte>() });

            // Record the averages for temporal smoothing.
            avgXHistory.Add(avgX);
            avgYHistory.Add(avgY);
            double rho_avg = 0.0;
            double x_avg = 0.0;
            double y_avg = 0.0;
            int smoothLength = 3;
            if (avgXHistory.Count() > 0)
            {
                x_avg = avgXHistory.Median(smoothLength);
                y_avg = avgYHistory.Median(smoothLength);
                rho_avg = Math.Sqrt(x_avg * x_avg + y_avg * y_avg);
            }

            // Convert the HSV visualization to BGR for display.
            CvInvoke.CvtColor(hsv, currentImage, ColorConversion.Hsv2Bgr);

            // Red line: instantaneous average flow direction, if significant.
            if (rho > threshold)
            {
                currentImage.Draw(new LineSegment2D(new Point(currentImage.Size.Width / 2, currentImage.Height / 2), new Point((int)(avgX * 10) + currentImage.Size.Width / 2, (int)(avgY * 10) + currentImage.Height / 2)), new Bgr(Color.Red), 2);
            }
            // Blue line: history-smoothed average flow direction.
            if (rho_avg > threshold)
            {
                currentImage.Draw(new LineSegment2D(new Point(currentImage.Size.Width / 2, currentImage.Height / 2), new Point((int)(x_avg * 10) + currentImage.Size.Width / 2, (int)(y_avg * 10) + currentImage.Height / 2)), new Bgr(Color.Blue), 2);
            }

            // Push the rendered frame to the UI.
            currentBitmap = currentImage.Bitmap;
            videoPictureBox.Image = currentBitmap;
        }
    }
    catch (Exception exp)
    {
        MessageBox.Show(exp.Message + " " + exp.StackTrace);
    }
    perfSw.Stop();
    fps = ((double)framesProcessed / fpsSw.Elapsed.TotalSeconds);
    statusLabel.Text = framesProcessed.ToString() + " totalms: " + fpsSw.ElapsedMilliseconds.ToString() + " fps: " + ((double)framesProcessed / fpsSw.Elapsed.TotalSeconds).ToString() + " perfms: " + perfSw.ElapsedMilliseconds + " vmag: " + rho + " vang: " + theta_deg;
}
// Handles a new frame: computes dense Farneback flow (Mat overload, 2-channel
// CV_32F flow field) against the previous frame, derives per-pixel magnitude
// and angle, prints the estimated motion offsets, and stores the current
// frame as the new previous frame.
private void OpticalFlowEvent(ImageProcess sender, Mat mat)
{
    // First frame: nothing to compare against yet — just remember it.
    if (cnt == 0)
    {
        prevMat = mat.Clone();
        cnt++;
        return;
    }

    // BUGFIX: intermediate images/Mats are now disposed; the original leaked
    // every one of them on each event.
    using (Image<Emgu.CV.Structure.Gray, byte> prev_img = prevMat.ToImage<Emgu.CV.Structure.Gray, byte>())
    using (Image<Emgu.CV.Structure.Gray, byte> curr_img = mat.ToImage<Emgu.CV.Structure.Gray, byte>())
    using (Mat flow = new Mat(prev_img.Height, prev_img.Width, DepthType.Cv32F, 2))
    using (Mat magnitude = new Mat())
    using (Mat angle = new Mat())
    using (Mat magn_norm = new Mat())
    {
        CvInvoke.CalcOpticalFlowFarneback(prev_img, curr_img, flow, 0.5, 3, 15, 3, 6, 1.3, 0);

        // Split into x ([0]) and y ([1]) components, then polar form (degrees).
        Mat[] flow_parts = flow.Split();
        CvInvoke.CartToPolar(flow_parts[0], flow_parts[1], magnitude, angle, true);
        CvInvoke.Normalize(magnitude, magn_norm, 0.0, 1.0, NormType.MinMax);

        // Estimated (x, y) motion offsets for this frame pair.
        List<double> offset = CalculateDirection(angle, magn_norm);
        Console.WriteLine(offset[0] + " , " + offset[1]);

        flow_parts[0].Dispose();
        flow_parts[1].Dispose();
    }

    // BUGFIX: dispose the previous frame before replacing it — the old Mat was
    // leaked on every event.
    prevMat.Dispose();
    prevMat = mat.Clone();
}
// Frame pipeline: (1) isolate blue regions via channel arithmetic and
// morphology and detect circles in the blue mask, (2) build a yellow-ish mask
// (currently unconsumed), (3) compute windowed dense optical flow on a 5x
// scaled-down black-isolated image and draw each 5x5 grid cell's average flow
// vector onto the RGB frame.
// NOTE(review): imageHSV is computed but never used by the active code.
private void Process(Mat frame)
{
    var imageHSV = frame.ToImage<Hsv, byte>();
    var imageRGB = frame.ToImage<Rgb, byte>();
    var channels = imageRGB.Split();
    var blue = channels[0];
    var green = channels[1];
    var red = channels[2];
    // (Commented-out experiments with alternative channel-difference color
    // filters omitted here for readability.)

    // Blue mask #1: invert (green-blue)+(red-blue), keep near-saturated pixels.
    var blue1 = (green - blue) + (red - blue);
    blue1 = blue1.Not();
    blue1 = blue1.ThresholdToZero(new Gray(250));
    // Blue mask #2: pixels where green/blue clearly dominate red.
    var blue2 = (green - red) + (blue - red);
    blue2 = blue2.InRange(new Gray(80), new Gray(255));
    var colorSeparated = blue1 & blue2;

    // Clean up the blue mask (open/close + blur) and detect circles in it.
    var img = colorSeparated;
    img = img.Erode(3);
    img = img.Dilate(3);
    img = img.SmoothGaussian(7);
    img = img.Dilate(3);
    img = img.Erode(3);
    img = img.InRange(new Gray(50), new Gray(255));
    var result = img.HoughCircles(new Gray(150), new Gray(50), 2.2, 50, 15, 120);
    foreach (var circles in result)
    {
        foreach (var circle in circles)
        {
            img.Draw(circle, new Gray(255), 3);
        }
    }

    // Yellow-ish (not-blue) mask: heavy morphology + smoothing, then binarize.
    // NOTE(review): yellow1 is not consumed by any active code below — only by
    // the commented-out Hough-line / contour experiments.
    var yellow1 = (green - blue) + (red - blue);
    yellow1 = yellow1.Erode(5);
    yellow1 = yellow1.Dilate(5);
    yellow1 = yellow1.SmoothGaussian(7);
    yellow1 = yellow1.Dilate(5);
    yellow1 = yellow1.Erode(5);
    yellow1 = yellow1.SmoothGaussian(7);
    yellow1 = yellow1.ThresholdBinary(new Gray(120), new Gray(255));
    // (Commented-out Hough-line drawing and contour/perspective-warp
    // experiments omitted here for readability.)

    // Optical-flow stage: isolate dark pixels and scale down 5x to reduce load.
    var frameImage = frame.ToPipelineImage();
    var frameOneColorImage = frameImage.IsolateColorBlackByAverage(80);
    var scaleDownFactor = 5F;
    var scaledDownFrameOneColorImage = frameOneColorImage.Resize(1D / scaleDownFactor, Inter.Cubic);
    if (_scaledDownFrameOneColorImagePrev != null)
    {
        // Accumulate flow into a 5x5 grid of windows.
        var horizontalWindowCount = 5;
        var verticalWindowCount = 5;
        var averageRegionFlow = new PointF[horizontalWindowCount, verticalWindowCount];
        var flowResult = new Mat();
        CvInvoke.CalcOpticalFlowFarneback(_scaledDownFrameOneColorImagePrev, scaledDownFrameOneColorImage, flowResult, 0.5, 3, 15, 5, 1, 1.2, OpticalflowFarnebackFlag.Default);
        var flowResultChannels = flowResult.Split();
        var flowResultX = flowResultChannels[0];
        var flowResultY = flowResultChannels[1];
        var flowWindowHeight = (float)Math.Ceiling(flowResult.Rows / (float)verticalWindowCount);
        var flowWindowWidth = (float)Math.Ceiling(flowResult.Cols / (float)horizontalWindowCount);
        var flowWindowVerticalIndexCounter = 0;
        var flowWindowRowCounter = 0;
        for (var r = 0; r < flowResult.Rows; r++)
        {
            var flowWindowHorizontalIndexCounter = 0;
            var flowWindowColCounter = 0;
            var horizontalLineFlowTotal = new PointF(0, 0);
            for (var c = 0; c < flowResult.Cols; c++)
            {
                // Read the raw float flow components straight from the Mat data.
                var xyValues = new float[2];
                Marshal.Copy(flowResultX.DataPointer + (((r * flowResultX.Cols) + c) * flowResultX.ElementSize), xyValues, 0, 1);
                Marshal.Copy(flowResultY.DataPointer + (((r * flowResultY.Cols) + c) * flowResultY.ElementSize), xyValues, 1, 1);
                var xShift = xyValues[0];
                var yShift = xyValues[1];
                // Window boundary: flush the running row total into the cell.
                // NOTE(review): the final flush fires at c == Cols - 1 BEFORE
                // the last pixel is accumulated, so the last pixel of every row
                // is added to a total that is never flushed — confirm this
                // off-by-one is acceptable.
                if (flowWindowColCounter >= flowWindowWidth || c == flowResult.Cols - 1)
                {
                    averageRegionFlow[flowWindowHorizontalIndexCounter, flowWindowVerticalIndexCounter].X += horizontalLineFlowTotal.X;
                    averageRegionFlow[flowWindowHorizontalIndexCounter, flowWindowVerticalIndexCounter].Y += horizontalLineFlowTotal.Y;
                    horizontalLineFlowTotal = new PointF(0, 0);
                    flowWindowHorizontalIndexCounter++;
                    flowWindowColCounter = 0;
                }
                horizontalLineFlowTotal.X += xShift;
                horizontalLineFlowTotal.Y += yShift;
                flowWindowColCounter++;
            }
            if (flowWindowRowCounter >= flowWindowHeight || r == flowResult.Rows - 1)
            {
                flowWindowVerticalIndexCounter++;
                flowWindowRowCounter = 0;
            }
            flowWindowRowCounter++;
        }
        // Convert window totals to averages and draw one vector per cell.
        for (var j = 0; j < verticalWindowCount; j++)
        {
            for (var i = 0; i < horizontalWindowCount; i++)
            {
                averageRegionFlow[i, j].X /= flowWindowHeight * flowWindowWidth;
                averageRegionFlow[i, j].Y /= flowWindowHeight * flowWindowWidth;
                var sx = (int)(i * flowWindowWidth + flowWindowWidth / 2);
                var sy = (int)(j * flowWindowHeight + flowWindowHeight / 2);
                // NOTE(review): sx/sy are in the scaled-down coordinate space
                // but drawn on the full-size imageRGB — confirm intended.
                imageRGB.Draw(new LineSegment2D(new Point(sx, sy), new Point(sx + (int)averageRegionFlow[i, j].X, sy + (int)averageRegionFlow[i, j].Y)), new Rgb(Color.LawnGreen), 2);
            }
        }
    }
    // Keep the scaled image for the next frame's flow computation.
    _scaledDownFrameOneColorImagePrev = scaledDownFrameOneColorImage;
    // (Commented-out feature-detector / corner-detection / per-channel circle
    // experiments omitted here for readability.)
}
// Calculates dense optical flow (Farneback) between the stored previous frame
// and the incoming bitmap, draws the flow map, and alpha-blends it over the
// original frame — via CUDA when available, CPU Overlay otherwise.
public Bitmap Dense_Optical_Flow(Bitmap bmp, OpticalFlowVariable optiVariables, Camera cam)
{
    // Clamp the frame-size reduction factor to at least 1.
    frameReduction = optiVariables.frameReduction < 1 ? 1 : optiVariables.frameReduction;

    // Current frame becomes the previous frame; convert the new bitmap to Mat.
    prev_frame = matframe;
    Image<Bgr, Byte> imageCV = new Image<Bgr, byte>(bmp); // Image class from Emgu.CV
    matframe = imageCV.Mat;
    if (prev_frame == null)
    {
        // Very first call: no previous frame to compare against yet.
        return (bmp);
    }
    frame_nr += 1;

    // Allocate before resizing (below) so this overlay stays at original size.
    img_average_vectors = new Image<Bgr, byte>(matframe.Width, matframe.Height);
    orig_height = matframe.Height;
    // NOTE(review): n_size is computed but never used — the GPU resize below
    // works from scale factors instead. Confirm it can be removed.
    Size n_size = new Size(matframe.Width / frameReduction, matframe.Height / frameReduction);

    // Downscale the current frame on the GPU to reduce processing load.
    Mat matFramDst = new Mat();
    using (GpuMat gMatSrc = new GpuMat())
    using (GpuMat gMatDst = new GpuMat())
    {
        gMatSrc.Upload(matframe);
        Emgu.CV.Cuda.CudaInvoke.Resize(gMatSrc, gMatDst, new Size(0, 0), (double)1 / frameReduction, (double)1 / frameReduction);
        gMatDst.Download(matFramDst);
    }
    matframe = matFramDst;
    if (prev_frame.Height != matframe.Height)
    {
        // Sizes disagree (e.g. frameReduction changed between frames) — skip.
        return (bmp);
    }

    // Greyscale inputs compared by the flow computation.
    Image<Gray, Byte> prev_grey_img, curr_grey_img;
    prev_grey_img = new Image<Gray, byte>(prev_frame.Width, prev_frame.Height);
    curr_grey_img = new Image<Gray, byte>(matframe.Width, matframe.Height);
    // Per-direction flow result buffers.
    Image<Gray, float> flow_x;
    Image<Gray, float> flow_y;
    flow_x = new Image<Gray, float>(matframe.Width, matframe.Height);
    flow_y = new Image<Gray, float>(matframe.Width, matframe.Height);
    CvInvoke.CvtColor(matframe, curr_grey_img, ColorConversion.Bgr2Gray);
    CvInvoke.CvtColor(prev_frame, prev_grey_img, ColorConversion.Bgr2Gray);

    // Farneback parameters: pyrScale 0.5 (each pyramid level is half size),
    // 3 levels, averaging window 10 (larger = more noise-robust but blurrier),
    // 3 iterations per level, polyN 6 (pixel neighbourhood), polySigma 1.3,
    // no flags.
    CvInvoke.CalcOpticalFlowFarneback(prev_grey_img, curr_grey_img, flow_x, flow_y, 0.5, 3, 10, 3, 6, 1.3, 0);

    // Render the flow vectors onto a copy of the (downscaled) current frame.
    Image<Bgr, Byte> farnebackImg = Draw_Farneback_flow_map(matframe.ToImage<Bgr, Byte>(), flow_x, flow_y, optiVariables); // given in global variables section

    // Release intermediate buffers.
    prev_grey_img.Dispose();
    curr_grey_img.Dispose();
    flow_x.Dispose();
    flow_y.Dispose();

    // Semi-transparent overlay of the flow image at original frame size.
    // NOTE(review): alphaImgShape is full-size while farnebackImg is the
    // downscaled frame; AddWeighted requires matching sizes — confirm this
    // only runs with frameReduction == 1 or that BlackTransparent rescales.
    Image<Bgra, Byte> alphaImgShape = new Image<Bgra, byte>(imageCV.Size.Width, imageCV.Size.Height, new Bgra(0, 0, 0, .5));
    CvInvoke.AddWeighted(alphaImgShape, .5, BlackTransparent(farnebackImg), .5, 0, alphaImgShape);
    Mat alphaimg = new Mat();
    CvInvoke.CvtColor(imageCV, alphaimg, ColorConversion.Bgr2Bgra);
    if (CudaInvoke.HasCuda)
    {
        // GPU alpha composition of the overlay onto the frame.
        using (GpuMat gMatSrc = new GpuMat())
        using (GpuMat gMatSrc2 = new GpuMat())
        using (GpuMat gMatDst = new GpuMat())
        {
            gMatSrc.Upload(alphaimg);
            gMatSrc2.Upload(alphaImgShape);
            CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
            gMatDst.Download(alphaimg);
        }
        return (alphaimg.Bitmap);
    }
    else
    {
        // CPU fallback composition.
        return (Overlay(imageCV, alphaImgShape).ToBitmap());
    }
}
/// <summary>
/// Get the motion influence map from the processed frames: for each consecutive
/// frame pair, compute dense Farneback flow, convert to polar form, aggregate
/// per block, and generate the motion influence values.
/// </summary>
/// <param name="vid">Path of the video.</param>
/// <param name="xBlockSize">Horizontal size of the block.</param>
/// <param name="yBlockSize">Vertical size of the block.</param>
/// <param name="noOfRowInBlock">Number of rows of the mega grid.</param>
/// <param name="noOfColInBlock">Number of columns of the mega grid.</param>
/// <param name="total_frames">Total frames that we will process.</param>
/// <param name="clustering">Whether to process the full video (true) or only 3 frames.</param>
/// <param name="frame_nr">Number of the starting frame.</param>
/// <returns>Motion influence map (one entry per frame pair).</returns>
public List<double[][][]> get_motion_influence_map(String vid, out int xBlockSize, out int yBlockSize, out int noOfRowInBlock, out int noOfColInBlock, out int total_frames, bool clustering, int frame_nr = 0)
{
    List<double[][][]> ret = new List<double[][][]>();
    xBlockSize = 0;
    yBlockSize = 0;
    noOfRowInBlock = 0;
    noOfColInBlock = 0;
    total_frames = 0;
    try
    {
        mag = new Mat();
        ang = new Mat();
        // BUGFIX: removed "frame = new Mat(); prev_frame = new Mat();" — both
        // were immediately overwritten by QueryFrame below and leaked.
        cap = new VideoCapture(vid);
        // When not clustering, only 3 frames are needed.
        total_frames = clustering ? Convert.ToInt32(cap.GetCaptureProperty(CapProp.FrameCount)) : 3;
        cap.SetCaptureProperty(CapProp.PosFrames, frame_nr);
        frame = cap.QueryFrame();
        prev_frame = frame;
    }
    catch (NullReferenceException except)
    {
        Console.WriteLine(except.Message);
        // BUGFIX: the original fell through with a broken capture and crashed
        // in the loop below; return the (empty) result instead.
        return ret;
    }

    int mm = 0;
    Console.WriteLine("Total Frames : {0}", total_frames);
    while (mm < total_frames - 1)
    {
        Console.WriteLine("Frame : " + frame_nr);
        prev_frame = frame;
        frame_nr += 1;
        cap.SetCaptureProperty(CapProp.PosFrames, frame_nr);
        frame = cap.QueryFrame();

        // BUGFIX: the original pre-allocated grey images and immediately
        // replaced them with the ToImage results, leaking one full-size image
        // pair per frame; the buffers are now created once and disposed.
        using (Image<Gray, Byte> curr_grey_img = frame.ToImage<Gray, byte>())
        using (Image<Gray, Byte> prev_grey_img = prev_frame.ToImage<Gray, Byte>())
        using (Image<Gray, float> flow_x = new Image<Gray, float>(frame.Width, frame.Height))
        using (Image<Gray, float> flow_y = new Image<Gray, float>(frame.Width, frame.Height))
        {
            CvInvoke.CalcOpticalFlowFarneback(prev_grey_img, curr_grey_img, flow_x, flow_y, 0.5, 3, 15, 3, 6, 1.3, 0);
            CvInvoke.CartToPolar(flow_x, flow_y, mag, ang);
            // Aggregate flow per block, then derive the motion influence values.
            OptFlowOfBlocks.calcOptFlowOfBlocks(mag, ang, curr_grey_img, out opFlowOfBlocks, out centreOfBlocks, out rows, out cols, out noOfRowInBlock, out noOfColInBlock, out xBlockSize, out yBlockSize, out blockSize);
            motionInMapGenerator(opFlowOfBlocks, blockSize, centreOfBlocks, xBlockSize, yBlockSize, out motionInfVal);
            ret.Add(motionInfVal);
        }
        mm++;
    }
    return ret;
}
// Optical Flow Implementation: computes dense Farneback flow between
// previousFrame and nextFrame and visualizes it as an HSV image in
// pictureViewBox (hue = direction, value = magnitude), then rotates the
// frame buffers for the next call.
public void OpticalFlow()
{
    // Flow component buffers (x and y velocity per pixel).
    Image<Gray, float> velx = new Image<Gray, float>(new Size(img.Width, img.Height));
    Image<Gray, float> vely = new Image<Gray, float>(new Size(img.Width, img.Height));

    CvInvoke.CalcOpticalFlowFarneback(previousFrame, nextFrame, velx, vely, 0.5, 3, 60, 3, 5, 1.1, OpticalflowFarnebackFlag.Default);

    // Color each pixel by its flow direction and magnitude.
    Image<Hsv, Byte> coloredMotion = new Image<Hsv, Byte>(new Size(img.Width, img.Height));
    for (int i = 0; i < coloredMotion.Width; i++)
    {
        for (int j = 0; j < coloredMotion.Height; j++)
        {
            // Flow components at this pixel (row j, column i).
            double velxHere = velx[j, i].Intensity;
            double velyHere = vely[j, i].Intensity;

            // Hue from flow direction, mapped into [0, 180).
            // BUGFIX: the original Math.Atan(velyHere / velxHere) produced NaN
            // when velxHere == 0 and an undefined byte cast; Atan2 preserves
            // the original mapping and handles every quadrant.
            double degrees = Math.Atan2(velyHere, velxHere) * 90.0 / Math.PI + 45.0;
            if (degrees < 0)
            {
                degrees += 180.0;
            }
            coloredMotion.Data[j, i, 0] = (Byte)degrees;
            coloredMotion.Data[j, i, 1] = 255;

            // Value from flow magnitude, scaled and clamped to byte range.
            double intensity = Math.Sqrt(velxHere * velxHere + velyHere * velyHere) * 10;
            coloredMotion.Data[j, i, 2] = (intensity > 255) ? (byte)255 : (byte)intensity;
        }
    }

    // coloredMotion shows intensity of motion by lightness, direction by color.
    pictureViewBox.Image = coloredMotion;

    // Rotate buffers for the next invocation.
    previousFrame.Dispose();
    previousFrame = img.Clone();
    img.Dispose();
    nextFrame.Dispose();
    // BUGFIX: flow buffers were leaked, and coloredMotion was disposed right
    // after being assigned to pictureViewBox — disposing the image that is
    // being displayed. It is now left alive for the picture box.
    velx.Dispose();
    vely.Dispose();
}