/// <summary>
/// Reads every frame of <paramref name="videoFile"/>, runs the red-object
/// tracker (TrackR) on it, writes the annotated frames to
/// <paramref name="outputFile"/> (MJPG, at the class-level <c>fps</c>) and
/// saves one "frameIndex x y" line per frame to <paramref name="targetFile"/>.
/// </summary>
/// <param name="videoFile">Path of the input video.</param>
/// <param name="outputFile">Path of the annotated output video.</param>
/// <param name="targetFile">Path of the text file receiving tracked coordinates.</param>
public void TrackRedVedio(String videoFile, String outputFile, String targetFile)
{
    VideoCapture capture = new VideoCapture(videoFile);
    VideoWriter writer = null;
    try
    {
        Mat image = new Mat();
        OpenCvSharp.CPlusPlus.Size dsize =
            new OpenCvSharp.CPlusPlus.Size(capture.FrameWidth, capture.FrameHeight);
        writer = new VideoWriter(outputFile, FourCC.MJPG, fps, dsize, true);

        int k = 0;
        List<string> targetList = new List<string>();
        // Every frame is processed (the original guarded on `k % 1 == 0`,
        // which is always true — presumably a leftover frame-skip knob).
        while (capture.Read(image))
        {
            double[] target;
            Mat res = TrackR(image, out target);
            writer.Write(res);
            targetList.Add(k + " " + target[0] + " " + target[1]);
            k++;
        }
        Console.WriteLine(k);
        IOTools.WriteListToTxt(targetList, targetFile);
    }
    finally
    {
        // BUG FIX: the writer leaked on exception and the capture was never
        // released at all (native handle leak).
        if (writer != null)
        {
            writer.Release();
        }
        capture.Release();
    }
}
/// <summary>
/// Gaussian-blurs <paramref name="src"/> using kernel size <paramref name="s"/>.
/// Even kernel dimensions are bumped to the next odd value, as required by
/// Cv2.GaussianBlur. A null size delegates to the parameterless overload;
/// a null source yields an empty Mat.
/// </summary>
/// <param name="src">Image to blur (may be null).</param>
/// <param name="s">Requested kernel size (may be null).</param>
/// <returns>The blurred image, or an empty Mat when src is null.</returns>
public static Mat ApplyGaussBlur(Mat src, OpenCvSharp.CPlusPlus.Size s)
{
    Mat dst = new Mat();
    if (s == null)
    {
        // No explicit kernel: fall back to the default-kernel overload.
        return ApplyGaussBlur(src);
    }
    if (src == null)
    {
        return dst;
    }
    // GaussianBlur demands odd kernel dimensions; round even ones up.
    if (s.Height % 2 == 0)
    {
        s.Height += 1;
    }
    if (s.Width % 2 == 0)
    {
        s.Width += 1;
    }
    Cv2.GaussianBlur(src, dst, s, 0);
    return dst;
}
// Ships one test-data image as a flattened feature vector.
/// <summary>
/// Loads the test image "&lt;testDataPath&gt;N).bmp", resizes it to 32x32 and
/// flattens its encoded pixel bytes into a double[]; shows a message box and
/// returns null when the file cannot be read or converted.
/// </summary>
/// <param name="dataNumber">Index N of the test image file.</param>
/// <returns>Flattened pixel values as doubles, or null on failure.</returns>
public double [] TestDataShipping(int dataNumber)
{
    try
    {
        Mat mat_cat = Cv2.ImRead(
            Property.testDataPath + Convert.ToString(dataNumber) + ").bmp",
            LoadMode.Color);
        OpenCvSharp.CPlusPlus.Size size = new OpenCvSharp.CPlusPlus.Size(32, 32);
        mat_cat = mat_cat.Resize(size, 0, 0, Interpolation.Linear);

        Bitmap bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_cat);
        byte[] rawBytes = imageToByteArray(bitmap);

        List<double> pixels = new List<double>();
        foreach (byte b in rawBytes)
        {
            pixels.Add(Convert.ToDouble(b));
        }
        return pixels.ToArray();
    }
    catch
    {
        MessageBox.Show("picture 폴더 test (" + Convert.ToString(dataNumber) + ") 포멧 확인");
        return null;
    }
}
/// <summary>
/// Crops the given shape region out of the scene — inflated by 10% per axis
/// and clamped to the scene bounds — and resizes the crop to 128x128.
/// </summary>
/// <param name="shapeRect">Bounding box of the detected shape.</param>
/// <param name="sceneMat">Full scene image to crop from.</param>
/// <returns>The 128x128 cropped shape image.</returns>
private Mat preprocessShape(OpenCvSharp.CPlusPlus.Rect shapeRect, Mat sceneMat)
{
    // Grow the ROI slightly, then clamp so SubMat stays inside the image.
    shapeRect.Inflate((int)(shapeRect.Width * 0.1), (int)(shapeRect.Height * 0.1));
    var sceneBounds = new OpenCvSharp.CPlusPlus.Rect(0, 0, sceneMat.Width, sceneMat.Height);
    shapeRect = shapeRect.Intersect(sceneBounds);

    Mat cropped = sceneMat.SubMat(shapeRect);
    var targetSize = new OpenCvSharp.CPlusPlus.Size(128, 128);
    return cropped.Resize(targetSize);
}
/// <summary>
/// Overlays deep-learning tracking results on every frame of
/// <paramref name="videoFile"/> and writes the annotated video (MJPG) to
/// <paramref name="outputFile"/>. <paramref name="targetFile"/> holds one
/// "x y" row per result; <paramref name="indexFile"/> maps frame numbers to
/// result rows; <paramref name="fileoffset"/> shifts into the result list.
/// </summary>
/// <param name="color">Circle colour for the tracked position.</param>
public void DrawDLInVedio(String videoFile, String outputFile, String targetFile,
                          String indexFile, int fileoffset, Scalar color)
{
    VideoCapture capture = new VideoCapture(videoFile);
    try
    {
        Mat image = new Mat();
        OpenCvSharp.CPlusPlus.Size dsize =
            new OpenCvSharp.CPlusPlus.Size(capture.FrameWidth, capture.FrameHeight);
        VideoWriter writer = new VideoWriter(outputFile, FourCC.MJPG, fps, dsize, true);
        int k = 0;
        List<string> targetList = IOTools.ReadListFromTxt(targetFile);
        List<string> indexList = IOTools.ReadListFromTxt(indexFile);
        int t = 0;
        // `status` feeds the (currently disabled) PutText overlay below.
        String status = " ";
        while (capture.Read(image))
        {
            String[] ss = targetList[t + fileoffset].Split(' ');
            // Advance to the next result row once its frame index is reached.
            if (t < indexList.Count - 2 && k == int.Parse(indexList[t + 1]))
            {
                t++;
            }
            Cv2.Circle(image, (int)double.Parse(ss[0]), (int)double.Parse(ss[1]), 10, color, 2);
            // Cv2.PutText(image, status,
            //     new Point((int)double.Parse(ss[0]) - 5, (int)double.Parse(ss[1])), FontFace.Italic, 1, color, 3);

            // Classify vertical motion against the position five rows back.
            if (t > 5)
            {
                String[] ssPre = targetList[t + fileoffset - 5].Split(' ');
                if (double.Parse(ss[1]) - double.Parse(ssPre[1]) < -25)
                {
                    status = "Pick Up";
                }
                else if (double.Parse(ss[1]) - double.Parse(ssPre[1]) > 30)
                {
                    status = "Put Down";
                }
                else if (Math.Abs(double.Parse(ss[1]) -
                                  double.Parse(targetList[fileoffset].Split(' ')[1])) < 10)
                {
                    // Back near the baseline y of the first row: no action.
                    status = " ";
                }
            }
            writer.Write(image);
            k++;
        }
        writer.Release();
    }
    finally
    {
        // BUG FIX: the capture was never released (native handle leak).
        capture.Release();
    }
}
/// <summary>
/// Overlays particle-filter state on each frame of <paramref name="videoFile"/>:
/// the 100 particles of the current step as 1-px dots (Scalar(255,0,0)) and
/// the estimated target as a radius-10 circle (Scalar(255,255,0)), then writes
/// the annotated video (MJPG) to <paramref name="outputFile"/>.
/// </summary>
/// <param name="particlefile">Text file with 100 "x y" particle rows per step.</param>
/// <param name="resultFile">Text file with one "x y" estimate row per step.</param>
public void DrawParticlesInVedio(String videoFile, String outputFile,
                                 String particlefile, String resultFile)
{
    VideoCapture capture = new VideoCapture(videoFile);
    try
    {
        Mat image = new Mat();
        OpenCvSharp.CPlusPlus.Size dsize =
            new OpenCvSharp.CPlusPlus.Size(capture.FrameWidth, capture.FrameHeight);
        VideoWriter writer = new VideoWriter(outputFile, FourCC.MJPG, fps, dsize, true);
        List<string> targetList = IOTools.ReadListFromTxt(resultFile);
        List<string> particleList = IOTools.ReadListFromTxt(particlefile);
        int t = 0;
        while (capture.Read(image))
        {
            // Stop once the result file runs out of rows.
            if (t >= targetList.Count)
            {
                break;
            }
            String[] ss = targetList[t].Split(' ');
            // 100 particle rows belong to tracking step t.
            for (int i = 0; i < 100; i++)
            {
                String[] pp = particleList[t * 100 + i].Split(' ');
                Cv2.Circle(image, (int)double.Parse(pp[0]), (int)double.Parse(pp[1]),
                           1, new Scalar(255, 0, 0), 2);
            }
            Cv2.Circle(image, (int)double.Parse(ss[0]), (int)double.Parse(ss[1]),
                       10, new Scalar(255, 255, 0), 2);
            writer.Write(image);
            t++;
        }
        writer.Release();
    }
    finally
    {
        // BUG FIX: the capture was never released (native handle leak).
        // Also removed the unused `k` and `status` locals.
        capture.Release();
    }
}
/// <summary>
/// Detects faces in the bitmap contained in <paramref name="ms"/> using the
/// frontal-face Haar cascade, draws a magenta ellipse around each detection
/// and returns the annotated image encoded as JPEG bytes.
/// </summary>
/// <param name="ms">Stream containing a decodable bitmap image.</param>
/// <returns>JPEG-encoded annotated image.</returns>
private byte[] ImageCompleted(MemoryStream ms)
{
    Mat src = OpenCvSharp.Extensions.BitmapConverter.ToMat(new System.Drawing.Bitmap(ms));
    Mat gray = new Mat();
    CascadeClassifier haarCascade = new CascadeClassifier("./haarcascade_frontalface_default.xml");
    var result = src.Clone();
    Cv2.CvtColor(src, gray, ColorConversion.BgrToGray);

    // Face detection on the grayscale image.
    OpenCvSharp.CPlusPlus.Rect[] faces = haarCascade.DetectMultiScale(gray);

    // Draw an ellipse at each detected face position.
    foreach (OpenCvSharp.CPlusPlus.Rect face in faces)
    {
        var center = new OpenCvSharp.CPlusPlus.Point
        {
            X = (int)(face.X + face.Width * 0.5),
            Y = (int)(face.Y + face.Height * 0.5)
        };
        var axes = new OpenCvSharp.CPlusPlus.Size
        {
            Width = (int)(face.Width * 0.5),
            Height = (int)(face.Height * 0.5)
        };
        Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 2);
    }

    System.Drawing.Bitmap dstBitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(result);
    byte[] imageData;
    using (MemoryStream ms2 = new MemoryStream())
    {
        dstBitmap.Save(ms2, System.Drawing.Imaging.ImageFormat.Jpeg);
        // BUG FIX: GetBuffer() exposes the stream's whole internal buffer,
        // including unused trailing bytes beyond the JPEG data; ToArray()
        // returns exactly Length bytes.
        imageData = ms2.ToArray();
    }

    // Release native/GDI resources that were previously leaked.
    dstBitmap.Dispose();
    result.Dispose();
    gray.Dispose();
    src.Dispose();
    return imageData;
}
/// <summary>
/// Starts a background thread that drains the recordFrames queue, writing
/// each frame to "&lt;Conf.filepath&gt;1.avi" (MJPG) and its matching timestamp
/// (from recordTimes) to "&lt;Conf.filepath&gt;t.txt". The thread exits only when
/// the queue is empty AND isRecordingCamera has been set to false.
/// NOTE(review): the pacing sleep is (1000 / fps - 10) ms — this goes negative
/// for fps &gt; 100 and would throw; confirm fps stays well below that.
/// </summary>
public void recordCamera()
{
    isRecordingCamera = true;
    int k = 0;
    // Give the capture side a head start before draining the queue.
    Thread.Sleep(100);
    new Thread(() =>
    {
        IplImage img;
        // recordFrames.TryPeek(out img);
        OpenCvSharp.CPlusPlus.Size dsize = new OpenCvSharp.CPlusPlus.Size(cap.FrameWidth, cap.FrameHeight);
        VideoWriter writer = new VideoWriter(Conf.filepath + "1.avi", FourCC.MJPG, fps, dsize, true);
        StreamWriter sw = new StreamWriter(Conf.filepath + "t.txt");
        while (true)
        {
            // Scratch mats for the (currently disabled) gray/canny pipeline.
            Mat gray = new Mat();
            Mat canny = new Mat();
            Mat dst = new Mat();
            try
            {
                // Frames and timestamps are dequeued in lock-step.
                bool suc = recordFrames.TryDequeue(out img);
                if (suc)
                {
                    Mat frame = new Mat(img, true);
                    string time;
                    recordTimes.TryDequeue(out time);
                    // Console.WriteLine(DateTime.Now.ToString("==yyyy-MM-dd-HH-mm-ss-fffffff"));
                    //Cv2.CvtColor(frame, gray, ColorConversion.BgrToGray);
                    //Cv2.Canny(gray, canny, 100, 180);
                    //Cv2.Resize(canny, dst, dsize, 0, 0, Interpolation.Linear);
                    // Write mat to VideoWriter
                    writer.Write((frame));
                    sw.WriteLine(time);
                    k++;
                }
                else
                {
                    // Queue empty: only stop once recording has been switched off,
                    // otherwise keep polling for more frames.
                    Console.WriteLine("ffff");
                    if (isRecordingCamera == false)
                    {
                        break;
                    }
                }
                //stop = true;
                // Pace the writer roughly to the capture frame rate.
                Thread.Sleep(1000 / fps - 10);
            }
            catch (Exception e)
            {
                // Surface errors on the UI label instead of killing the thread.
                label.Content = e.ToString();
            }
        }
        Console.WriteLine(k);
        writer.Release();
        sw.Close();
        return;

        // writer.Dispose();
        // cap.Dispose();
    }).Start();
}
// Data shipping loop for the training set.
/// <summary>
/// Loads one half of the "true" or "false" training images (selected by
/// <paramref name="firstNumber"/>), resizes each to 32x32, flattens its
/// encoded pixel bytes into a double[] slot of a combined input matrix, and
/// publishes the matrix to <c>Property.inputs</c> after the last image.
/// False samples are stored after all true samples via the trueData offset.
/// </summary>
/// <param name="dataType">"True" or "False" — selects folder, counts and offset.</param>
/// <param name="firstNumber">true = first half of the set, false = second half.</param>
public void DataLoop(string dataType, bool firstNumber)
{
    int firstDataNumber = 0;
    int dataNumber = 0;
    int trueData = 0;
    // FIX: was a redundant double assignment (`AlldataNumber = AlldataNumber = …`).
    int AlldataNumber = Property.Truedatanumber + Property.FalseDataNumber;
    double[][] input = new double[AlldataNumber][];
    string Path = "";

    switch (dataType)
    {
        case "True":
            Path = Directory.GetCurrentDirectory() + @"\true\true (";
            if (!firstNumber)
            {
                firstDataNumber = Property.TrueDataHalf;
                dataNumber = Property.Truedatanumber;
            }
            else
            {
                firstDataNumber = 0;
                dataNumber = Property.TrueDataHalf;
            }
            trueData = 0;
            break;

        case "False":
            Path = Directory.GetCurrentDirectory() + @"\false\false (";
            if (!firstNumber)
            {
                firstDataNumber = Property.FalseDataHalf;
                dataNumber = Property.FalseDataNumber;
            }
            else
            {
                firstDataNumber = 0;
                dataNumber = Property.FalseDataHalf;
            }
            // False samples go after all true samples in the input matrix.
            trueData = Property.Truedatanumber;
            break;
    }

    for (int i = firstDataNumber; i < dataNumber; i++)
    {
        try
        {
            Mat mat_cat = Cv2.ImRead(Path + (i + 1) + ").bmp", LoadMode.Color);
            OpenCvSharp.CPlusPlus.Size size = new OpenCvSharp.CPlusPlus.Size(32, 32);
            mat_cat = mat_cat.Resize(size, 0, 0, Interpolation.Linear);

            // FIX: the original allocated a whole Bitmap[dataNumber] array on
            // EVERY iteration and never disposed the bitmap; one per image is enough.
            Bitmap bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_cat);
            byte[] rawBytes = imageToByteArray(bitmap);
            List<double> pixels = new List<double>();
            foreach (byte b in rawBytes)
            {
                pixels.Add(Convert.ToDouble(b));
            }
            input[i + trueData] = pixels.ToArray();

            mat_cat.Dispose();
            bitmap.Dispose();

            // After the final image, publish the whole matrix.
            if (i == dataNumber - 1)
            {
                Property.inputs = input;
                input = null;
            }
        }
        catch
        {
            switch (dataType)
            {
                case "True":
                    MessageBox.Show("데이터 학습 실패 \ntrue " + (i + 1) + " 폴더에 이미지 형식 확인");
                    break;
                case "False":
                    MessageBox.Show("데이터 학습 실패 \nfalse " + (i + 1) + "폴더에 이미지 형식 확인");
                    break;
            }
        }
    }
}
/// <summary>
/// Edge-detects the bitmap, approximates contours to polygons and outlines
/// candidate shapes (4–19 vertices, at least 20x20 px) in blue on the bitmap.
/// When <paramref name="saveShapes"/> is set, each candidate is instead
/// cropped (inflated 10%, clamped to the image), resized to 128x128 and saved
/// as a PNG under MyPictures\shape_samples; nothing is drawn in that mode.
/// </summary>
/// <param name="bitmap">Image to scan; replaced with the annotated copy.</param>
/// <param name="saveShapes">true = save crops to disk instead of drawing.</param>
private void detectShapeCandidates(ref Bitmap bitmap, Boolean saveShapes)
{
    Debug.WriteLine("Running OpenCV");
    string myPhotos = Environment.GetFolderPath(Environment.SpecialFolder.MyPictures);
    Mat colorMat = BitmapConverter.ToMat(bitmap);

    // Mean intensity drives the adaptive Canny thresholds below.
    MatOfDouble mu = new MatOfDouble();
    MatOfDouble sigma = new MatOfDouble();
    Cv2.MeanStdDev(colorMat, mu, sigma);
    double mean = mu.GetArray(0, 0)[0];
    mu.Dispose();
    sigma.Dispose();

    Mat greyMat = new Mat();
    Cv2.CvtColor(colorMat, greyMat, ColorConversion.BgraToGray, 0);
    greyMat = greyMat.GaussianBlur(new OpenCvSharp.CPlusPlus.Size(1, 1), 5, 5, BorderType.Default);
    greyMat = greyMat.Canny(0.5 * mean, 1.2 * mean, 3, true);

    // FindContours modifies its input, so run it on a copy of the edge map.
    Mat contourMat = new Mat(greyMat.Size(), colorMat.Type());
    greyMat.CopyTo(contourMat);
    var contours = contourMat.FindContoursAsArray(ContourRetrieval.List, ContourChain.ApproxSimple);

    for (int j = 0; j < contours.Length; j++)
    {
        // Simplify each contour; tolerance scales with its perimeter (1%).
        var poly = Cv2.ApproxPolyDP(contours[j], 0.01 * Cv2.ArcLength(contours[j], true), true);
        int num = poly.Length;
        if (num >= 4 && num < 20)
        {
            var color = Scalar.Blue;
            var rect = Cv2.BoundingRect(poly);
            // Skip tiny candidates.
            if (rect.Height < 20 || rect.Width < 20)
            {
                continue;
            }
            if (saveShapes)
            {
                string path = Path.Combine(myPhotos, "shape_samples");
                path = Path.Combine(path, "shape_sample_" + Path.GetRandomFileName() + ".png");
                // Inflate the ROI by 10% per axis and clamp to image bounds.
                var matRect = new OpenCvSharp.CPlusPlus.Rect(0, 0, greyMat.Width, greyMat.Height);
                rect.Inflate((int)(rect.Width * 0.1), (int)(rect.Height * 0.1));
                rect = rect.Intersect(matRect);
                Mat shapeMat = greyMat.SubMat(rect);
                var size = new OpenCvSharp.CPlusPlus.Size(128, 128);
                shapeMat = shapeMat.Resize(size);
                Bitmap shape = shapeMat.ToBitmap();
                shape.Save(path);
                shape.Dispose();
                shapeMat.Dispose();
                continue;
            }
            Cv2.Rectangle(colorMat, rect, color, 2);
        }
    }
    bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(colorMat);
    colorMat.Dispose();
    greyMat.Dispose();
    contourMat.Dispose();
}
/// <summary>
/// Wires up the Kinect sensor: opens body-index, depth, color and body
/// (skeleton) frame readers and allocates the WriteableBitmaps/buffers each
/// stream renders into.
/// </summary>
public KinectPanel()
{
    base.TitleName = "Kinect Condition";
    kinect = KinectSensor.GetDefault();

    // Mats (OpenCV-backed initialisation is currently disabled; see the
    // commented-out lines near the bottom).
    this.bodyindexMat = new Mat();
    this.depthMat = new Mat();
    this.colorimageMat = new Mat();

    // Body-index stream: one byte per pixel identifying the tracked body.
    this.bodyIndexFrameDesc = kinect.DepthFrameSource.FrameDescription;
    this.bodyIndexBuffer = new byte[bodyIndexFrameDesc.LengthInPixels];
    this.bodyIndexColorImage = new WriteableBitmap(bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height, 96, 96, PixelFormats.Bgra32, null);
    this.bodyIndexColorRect = new Int32Rect(0, 0, bodyIndexFrameDesc.Width, bodyIndexFrameDesc.Height);
    this.bodyIndexColorStride = (int)(bodyIndexFrameDesc.Width * bodyIndexColorBytesPerPixel);
    this.bodyIndexColorBuffer = new byte[bodyIndexFrameDesc.LengthInPixels * bodyIndexColorBytesPerPixel];
    // One display colour per tracked body index.
    bodyIndexColors = new System.Windows.Media.Color[]{
        Colors.Red, Colors.Blue, Colors.Green, Colors.Yellow, Colors.Pink, Colors.Purple,
    };
    bodyIndexFrameReader = kinect.BodyIndexFrameSource.OpenReader();
    bodyIndexFrameReader.FrameArrived += bodyIndexFrameReader_FrameArrived;

    // Depth stream (16-bit grayscale).
    this.depthFrameReader = this.kinect.DepthFrameSource.OpenReader();
    this.depthFrameReader.FrameArrived += DepthFrame_Arrived;
    this.depthFrameDescription = this.kinect.DepthFrameSource.FrameDescription;
    this.depthBuffer = new ushort[this.depthFrameDescription.LengthInPixels];
    this.depthImageWidth = this.depthFrameDescription.Width;
    this.depthImageHeight = this.depthFrameDescription.Height;
    this.depthImage = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96, 96, PixelFormats.Gray16, null);
    this.depthRect = new Int32Rect(0, 0, depthFrameDescription.Width, depthFrameDescription.Height);
    this.depthStride = (int)(depthFrameDescription.Width * depthFrameDescription.BytesPerPixel);

    // Color stream (BGRA).
    this.colorImageFormat = ColorImageFormat.Bgra;
    this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
    this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
    this.colorFrameReader.FrameArrived += ColorFrame_Arrived;
    this.colors = new byte[this.colorFrameDescription.Width * this.colorFrameDescription.Height * this.colorFrameDescription.BytesPerPixel];
    this.imageWidth = this.colorFrameDescription.Width;
    this.imageHeight = this.colorFrameDescription.Height;
    this.colorimage = new WriteableBitmap(this.colorFrameDescription.Width, this.colorFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
    this.calibImg = new WriteableBitmap(this.colorFrameDescription.Width, this.colorFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
    this.bitmapRect = new Int32Rect(0, 0, this.colorFrameDescription.Width, this.colorFrameDescription.Height);
    this.bitmapStride = this.colorFrameDescription.Width * (int)this.colorFrameDescription.BytesPerPixel;

    // Body (skeleton) stream.
    this.bodyFrameReader = this.kinect.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += BodyFrame_Arrived;
    this.bodies = new Body[this.kinect.BodyFrameSource.BodyCount]; // BodyCount = number of skeleton slots
    this.size = new OpenCvSharp.CPlusPlus.Size(512, 424);          // depth/body-index resolution
    this.size1 = new OpenCvSharp.CPlusPlus.Size(imageWidth, imageHeight);
    //this.bodyindexMat = new Mat(size, MatType.CV_8UC1);
    //this.depthMat = bodyindexMat.Clone();
    //this.colorimageMat = new Mat(size1, MatType.CV_8UC3);
    stump = 0;
}
/// <summary> /// 指定したサイズ・型の2次元の行列として初期化 /// </summary> /// <param name="size">2次元配列のサイズ: Size(cols, rows) . /// Size コンストラクタでは,行数と列数が逆順になっていることに注意してください.</param> #else /// <summary> /// constructs 2D matrix of the specified size and type /// </summary> /// <param name="size">2D array size: Size(cols, rows) . In the Size() constructor, /// the number of rows and the number of columns go in the reverse order.</param> #endif public MatOfInt4(Size size) : base(size, ThisType) { }
/// <summary> /// 指定したサイズ・型の2次元の行列で、要素をスカラー値で埋めて初期化 /// </summary> /// <param name="size"> 2 次元配列のサイズ: Size(cols, rows) . Size() コンストラクタでは,行数と列数が逆順になっていることに注意してください.</param> /// <param name="s">各行列要素を初期化するオプション値.初期化の後ですべての行列要素を特定の値にセットするには, /// コンストラクタの後で,SetTo(Scalar value) メソッドを利用してください.</param> #else /// <summary> /// constucts 2D matrix and fills it with the specified Scalar value. /// </summary> /// <param name="size">2D array size: Size(cols, rows) . In the Size() constructor, /// the number of rows and the number of columns go in the reverse order.</param> /// <param name="s">An optional value to initialize each matrix element with. /// To set all the matrix elements to the particular value after the construction, use SetTo(Scalar s) method .</param> #endif public MatOfInt4(Size size, int s) : base(size, ThisType, s) { }