// =========================================================
/// <summary>
/// Draws cross markers at the specified locations on the image. Point
/// coordinates are offsets from the image/frame center (x right, y up) and are
/// converted here to bitmap pixel coordinates (y down).
/// </summary>
/// <param name="image">Bitmap to annotate; replaced with the marked-up copy.</param>
/// <param name="points">Marker positions, offset from the frame center.</param>
/// <param name="color">Marker color.</param>
/// <param name="size">Cross arm length in pixels.</param>
public void DrawMarks(ref Bitmap image, List<PointF> points, Color color, int size)
{
    try
    {
        using (Image<Bgr, Byte> img = new Image<Bgr, byte>(image))
        {
            foreach (var pt in points)
            {
                // Convert center-relative (y up) to bitmap coordinates (y down).
                PointF p = new PointF(pt.X + videoCapture.FrameCenterX, videoCapture.FrameCenterY - pt.Y);
                var cross = new Cross2DF(p, size, size);
                img.Draw(cross, new Bgr(color), 2);
            }

            // Replace the caller's bitmap with the annotated copy. Dispose the
            // original so its GDI handle is released promptly rather than at
            // finalization. NOTE(review): assumes the caller holds no other
            // reference to the old bitmap — confirm at call sites.
            Bitmap old = image;
            image = img.ToBitmap();
            old.Dispose();
        }
    }
    catch
    {
        // Best-effort drawing: a failure on one frame must not crash the caller.
        // NOTE(review): consider logging here — a fully silent catch hides bugs.
    }
}
/// <summary>
/// Computes dense optical flow (Horn–Schunck) between the current and next gray
/// frames, samples the flow field on a winSize grid, accumulates the component
/// sums into sumVectorFieldX/Y, stores the sampled vectors in vectorField, and
/// draws each sample (red cross at the grid point, yellow segment along the
/// flow vector) onto opticalFlowFrame.
/// </summary>
void ComputeDenseOpticalFlow()
{
    faceGrayImage = grayFrame;
    faceNextGrayImage = nextGrayFrame;

    // Compute dense optical flow using the Horn–Schunck algorithm; velx/vely
    // receive the per-pixel x/y velocity components.
    velx = new Image<Gray, float>(faceGrayImage.Size);
    vely = new Image<Gray, float>(faceNextGrayImage.Size);
    OpticalFlow.HS(faceGrayImage, faceNextGrayImage, true, velx, vely, 0.1d, new MCvTermCriteria(100));

    #region Dense Optical Flow Drawing
    Size winSize = new Size(10, 10);
    vectorFieldX = (int)Math.Round((double)faceGrayImage.Width / winSize.Width);
    vectorFieldY = (int)Math.Round((double)faceGrayImage.Height / winSize.Height);
    sumVectorFieldX = 0f;
    sumVectorFieldY = 0f;
    vectorField = new PointF[vectorFieldX][];
    for (int i = 0; i < vectorFieldX; i++)
    {
        vectorField[i] = new PointF[vectorFieldY];
        for (int j = 0; j < vectorFieldY; j++)
        {
            // Emgu's Image indexer is [row, column]: the row advances with j
            // (vertical step = winSize.Height) and the column with i
            // (horizontal step = winSize.Width). The original mixed Width and
            // Height here; it only worked because the window is square (10x10).
            Gray velx_gray = velx[j * winSize.Height, i * winSize.Width];
            float velx_float = (float)velx_gray.Intensity;
            Gray vely_gray = vely[j * winSize.Height, i * winSize.Width];
            float vely_float = (float)vely_gray.Intensity;

            sumVectorFieldX += velx_float;
            sumVectorFieldY += vely_float;
            vectorField[i][j] = new PointF(velx_float, vely_float);

            // Red cross marks the sample point, offset into the tracking area.
            Cross2DF cr = new Cross2DF(
                new PointF((i * winSize.Width) + trackingArea.X, (j * winSize.Height) + trackingArea.Y),
                1, 1);
            opticalFlowFrame.Draw(cr, new Bgr(Color.Red), 1);

            // Yellow segment shows the flow vector at that point.
            LineSegment2D ci = new LineSegment2D(
                new Point((i * winSize.Width) + trackingArea.X, (j * winSize.Height) + trackingArea.Y),
                new Point((int)((i * winSize.Width) + trackingArea.X + velx_float),
                          (int)((j * winSize.Height) + trackingArea.Y + vely_float)));
            opticalFlowFrame.Draw(ci, new Bgr(Color.Yellow), 1);
        }
    }
    #endregion
}
/// <summary>
/// Samples the previously computed dense flow field (velx/vely) on a 2x2 grid
/// over the tracked gray image, accumulates the component sums into
/// sumVectorFieldX/Y, stores the vectors in vectorField, and draws each sample
/// (red cross at the grid point, yellow segment along the flow vector) onto
/// _opticalFlowFrame.
/// </summary>
void DrawDenseOpticalFlow()
{
    int winSizeX = 2;
    int winSizeY = 2;
    vectorFieldX = (int)Math.Round((double)_currentTrackedGrayImage.Width / winSizeX);
    vectorFieldY = (int)Math.Round((double)_currentTrackedGrayImage.Height / winSizeY);
    sumVectorFieldX = 0f;
    sumVectorFieldY = 0f;
    vectorField = new PointF[vectorFieldX][];
    for (int i = 0; i < vectorFieldX; i++)
    {
        vectorField[i] = new PointF[vectorFieldY];
        for (int j = 0; j < vectorFieldY; j++)
        {
            // Emgu's Image indexer is [row, column]: the row advances with j
            // (vertical step = winSizeY) and the column with i (horizontal step
            // = winSizeX). The original used winSizeX for both velx indices and
            // winSizeY for both vely indices; that only worked because
            // winSizeX == winSizeY == 2.
            Gray velx_gray = velx[j * winSizeY, i * winSizeX];
            float velx_float = (float)velx_gray.Intensity;
            Gray vely_gray = vely[j * winSizeY, i * winSizeX];
            float vely_float = (float)vely_gray.Intensity;

            sumVectorFieldX += velx_float;
            sumVectorFieldY += vely_float;
            vectorField[i][j] = new PointF(velx_float, vely_float);

            // Red cross marks the sample point, offset into the tracking area.
            Cross2DF cr = new Cross2DF(
                new PointF((i * winSizeX) + _trackingArea.X, (j * winSizeY) + _trackingArea.Y),
                1, 1);
            _opticalFlowFrame.Draw(cr, new Bgr(Color.Red), 1);

            // Yellow segment shows the flow vector at that point.
            LineSegment2D ci = new LineSegment2D(
                new Point((i * winSizeX) + _trackingArea.X, (j * winSizeY) + _trackingArea.Y),
                new Point((int)((i * winSizeX) + _trackingArea.X + velx_float),
                          (int)((j * winSizeY) + _trackingArea.Y + vely_float)));
            _opticalFlowFrame.Draw(ci, new Bgr(Color.Yellow), 1);
        }
    }
}
/// <summary>
/// Draws a flow-summary overlay onto _ForShow inside the configured ROI.
/// "Average" draws a red cross at the center of the vector field plus a blue
/// arrow for the current Direction (1..8, clockwise starting at "up"; screen y
/// grows downward). "Rejected" draws a thick red cross at the same center.
/// Any other string draws nothing.
/// </summary>
/// <param name="a">Overlay to draw: "Average" or "Rejected".</param>
public void Draw(string a)
{
    if (_show && _ForShow != null)
    {
        switch (a)
        {
            case "Average":
            {
                int ii = vectorFieldX / 2;
                int jj = vectorFieldY / 2;
                Point center = new Point(ii * _winSize.Width, jj * _winSize.Height);

                SetImgROI(_ForShow, _ROI_center, _ROI_size);
                _ForShow.Draw(new Cross2DF(new PointF(center.X, center.Y), 2, 2), new Bgr(Color.Red), 5);

                // Map Direction 1..8 to an (dx, dy) arrow offset; this replaces
                // eight copy-pasted if-blocks from the original. vecval is the
                // arrow length in pixels.
                int vecval = 20;
                int dx = 0, dy = 0;
                bool known = true;
                switch (Direction)
                {
                    case 1: dx = 0;       dy = -vecval; break; // up
                    case 2: dx = vecval;  dy = -vecval; break; // up-right
                    case 3: dx = vecval;  dy = 0;       break; // right
                    case 4: dx = vecval;  dy = vecval;  break; // down-right
                    case 5: dx = 0;       dy = vecval;  break; // down
                    case 6: dx = -vecval; dy = vecval;  break; // down-left
                    case 7: dx = -vecval; dy = 0;       break; // left
                    case 8: dx = -vecval; dy = -vecval; break; // up-left
                    default: known = false; break;
                }

                // For an unknown Direction the original drew a default-constructed
                // (degenerate) segment at (0,0); that behavior is preserved.
                LineSegment2D ci = known
                    ? new LineSegment2D(center, new Point(center.X + dx, center.Y + dy))
                    : new LineSegment2D();
                _ForShow.Draw(ci, new Bgr(Color.Blue), 2);

                CvInvoke.cvResetImageROI(_ForShow);
                break;
            }
            case "Rejected":
            {
                int iii = vectorFieldX / 2;
                int jjj = vectorFieldY / 2;
                SetImgROI(_ForShow, _ROI_center, _ROI_size);
                _ForShow.Draw(
                    new Cross2DF(new PointF(iii * _winSize.Width, jjj * _winSize.Height), 1, 1),
                    new Bgr(Color.Red), 20);
                CvInvoke.cvResetImageROI(_ForShow);
                break;
            }
        }
    }
}
/// <summary>
/// Detects faces in a fixed input image via the Face++ API; for each detected
/// face it crops and saves the face rectangle, computes five feature points
/// (eyes, mouth corners, nose) normalized relative to the face center and size,
/// writes them to a per-face text file, and draws green crosses at the feature
/// locations on the source image.
/// </summary>
static void Main(string[] args)
{
    // SECURITY(review): API credentials are hard-coded in source. Move them to
    // configuration or environment variables; rotate these keys if this file
    // has been shared.
    String apiKey = "847e6315f892e21449da5f4077c5104f";
    String apiSecret = "BmskojfFyrZVQhkLfNSnRzX-lK8musO6";
    FaceService faceService = new FaceService(apiKey, apiSecret);

    string filePath = "D:\\Codes\\datasets\\face_morph\\bbt.jpg";
    DetectResult detectResult = faceService.Detection_DetectImg(filePath);
    Image<Bgr, Byte> srcImg = new Image<Bgr, Byte>(filePath);

    for (int cnt = 0; cnt < detectResult.face.Count; cnt++)
    {
        // Face++ reports positions/sizes as percentages of the image dimensions.
        // BUG FIX: the original cast only the raw percentage to int before
        // scaling ((int)position.width * srcImg.Width / 100), truncating the
        // percentage and yielding a wrong crop size; the cast now wraps the
        // whole expression, matching how the x/y coordinates are computed.
        Rectangle faceRect = new Rectangle(
            (int)(detectResult.face[cnt].position.center.x * srcImg.Width / 100 -
                  detectResult.face[cnt].position.width * srcImg.Width * 0.5 / 100),
            (int)(detectResult.face[cnt].position.center.y * srcImg.Height / 100 -
                  detectResult.face[cnt].position.height * srcImg.Height * 0.5 / 100),
            (int)(detectResult.face[cnt].position.width * srcImg.Width / 100),
            (int)(detectResult.face[cnt].position.height * srcImg.Height / 100));

        Image<Bgr, byte> faceImg = srcImg.GetSubRect(faceRect);
        string fileName = String.Format("D:\\Codes\\datasets\\face_morph\\result_bbt_face_{0}.jpg", cnt);
        faceImg.Save(fileName);

        // Feature points are stored as offsets from the face center, normalized
        // by the face width/height.
        IList<FaceppSDK.Point> featurePoints = new List<FaceppSDK.Point>();
        //featurePoints.Add(detectResult.face[cnt].position.center);

        FaceppSDK.Point tempPoint1 = new FaceppSDK.Point();
        tempPoint1.x = (detectResult.face[cnt].position.eye_left.x - detectResult.face[cnt].position.center.x) /
                       detectResult.face[cnt].position.width;
        tempPoint1.y = (detectResult.face[cnt].position.eye_left.y - detectResult.face[cnt].position.center.y) /
                       detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint1);

        FaceppSDK.Point tempPoint2 = new FaceppSDK.Point();
        tempPoint2.x = (detectResult.face[cnt].position.eye_right.x - detectResult.face[cnt].position.center.x) /
                       detectResult.face[cnt].position.width;
        tempPoint2.y = (detectResult.face[cnt].position.eye_right.y - detectResult.face[cnt].position.center.y) /
                       detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint2);

        FaceppSDK.Point tempPoint3 = new FaceppSDK.Point();
        tempPoint3.x = (detectResult.face[cnt].position.mouth_left.x - detectResult.face[cnt].position.center.x) /
                       detectResult.face[cnt].position.width;
        tempPoint3.y = (detectResult.face[cnt].position.mouth_left.y - detectResult.face[cnt].position.center.y) /
                       detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint3);

        FaceppSDK.Point tempPoint4 = new FaceppSDK.Point();
        tempPoint4.x = (detectResult.face[cnt].position.mouth_right.x - detectResult.face[cnt].position.center.x) /
                       detectResult.face[cnt].position.width;
        tempPoint4.y = (detectResult.face[cnt].position.mouth_right.y - detectResult.face[cnt].position.center.y) /
                       detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint4);

        FaceppSDK.Point tempPoint5 = new FaceppSDK.Point();
        tempPoint5.x = (detectResult.face[cnt].position.nose.x - detectResult.face[cnt].position.center.x) /
                       detectResult.face[cnt].position.width;
        tempPoint5.y = (detectResult.face[cnt].position.nose.y - detectResult.face[cnt].position.center.y) /
                       detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint5);

        // using-blocks replace the original manual Flush/Close so the streams
        // are released even if writing or drawing throws.
        string pointFileName = String.Format("D:\\Codes\\datasets\\face_morph\\result_bbt_face_{0}.txt", cnt);
        using (FileStream fileStream = new FileStream(pointFileName, FileMode.Create))
        using (StreamWriter streamWriter = new StreamWriter(fileStream))
        {
            foreach (FaceppSDK.Point featurePoint in featurePoints)
            {
                streamWriter.WriteLine(featurePoint.x.ToString());
                streamWriter.WriteLine(featurePoint.y.ToString());

                // Draw the (percentage-scaled) feature location on the source image.
                System.Drawing.PointF point = new System.Drawing.PointF(
                    (float)featurePoint.x * srcImg.Width / 100,
                    (float)featurePoint.y * srcImg.Height / 100);
                Cross2DF cross = new Cross2DF(point, (float)3.0, (float)3.0);
                srcImg.Draw(cross, new Bgr(0, 255, 0), 3);
            }
        }
        //srcImg.Save("D:\\Codes\\datasets\\face_morph\\result_bbt.jpg");
    }
}