// Handles the "Load Video" button: lets the user pick a video file, then
// wires up the background-subtraction pipeline and starts grabbing frames.
private void BrowseButton_Click(object sender, RoutedEventArgs e)
{
    OpenFileDialog loadVideoDialog = new OpenFileDialog();
    loadVideoDialog.Filter = "Video files (*.avi, *.mkv, *.mp4) | *.avi; *.mkv; *.mp4";
    try
    {
        loadVideoDialog.ShowDialog();
        // Fixed: bail out BEFORE constructing the VideoCapture. The original
        // built the capture from a possibly-empty filename first, and its
        // early `return` also skipped the dialog Dispose (handle leak).
        if (string.IsNullOrEmpty(loadVideoDialog.FileName))
        {
            return;
        }
        capturedVideo = new VideoCapture(loadVideoDialog.FileName);
        LoadVideoInit();
        originalFrame = new Mat();
        thresholdedFrame = new Mat();
        mog = new BackgroundSubtractorMOG(kernelHistory, kernelMixtures, kernelBackgroundRatio, kernelNoiseSigma);
        mog2 = new BackgroundSubtractorMOG2(kernelHistory, kernelThreshold, false);
        capturedVideo.ImageGrabbed += ProcessVideo;
        capturedVideo.Start();
    }
    catch (Exception err)
    {
        System.Windows.MessageBox.Show("Something went wrong!\n" + err.ToString(), "Error!", MessageBoxButton.OK, MessageBoxImage.Asterisk);
    }
    finally
    {
        // Always release the dialog, on success, error, and early return alike.
        loadVideoDialog.Dispose();
    }
}
// Unity entry point: grabs the helper components, applies the Android
// front-camera low-light workaround, then starts the webcam-to-Mat pipeline
// and creates the MOG2 background subtractor.
void Start()
{
    fpsMonitor = GetComponent<FpsMonitor>();
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Lowering requestedFPS works around the WebCamTexture image becoming
    // low light on some Android devices (Pixel, Pixel 2). Keep the original
    // value so the rear camera can use it.
    // https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
    // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
    rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
    if (webCamTextureToMatHelper.requestedIsFrontFacing)
    {
        webCamTextureToMatHelper.requestedFPS = 15;
    }
    webCamTextureToMatHelper.Initialize();
#else
    webCamTextureToMatHelper.Initialize();
#endif

    backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
    // Tuning knobs intentionally left at their defaults:
    // backgroundSubstractorMOG2.setHistory (2);
    // backgroundSubstractorMOG2.setVarThreshold (16);
    // backgroundSubstractorMOG2.setDetectShadows (true);
}
// Wires the filter to its capture device and inference backend, and creates
// the MOG2 segmenter (history: 500, varThreshold: 16, shadow detection on).
public Filter(DeviceModel device, InferenceServer inferenceServer)
{
    _device = device;
    _inferenceServer = inferenceServer;
    _segmentor = BackgroundSubtractorMOG2.Create(500, 16, true);
    // Open a dedicated channel to the inference server for this filter.
    _inferenceChannel = inferenceServer.CreateChannel();
}
// Demonstrates four background-subtraction algorithms (MOG2, MOG, GMG, KNN)
// side by side on live camera frames until a key is pressed.
public void BackgroundSubtractor_Example()
{
    // Fixed: the capture was never disposed (native handle leak).
    using VideoCapture capture = new VideoCapture(0);
    using var MOG2 = BackgroundSubtractorMOG2.Create();
    using var MOG = BackgroundSubtractorMOG.Create();
    using var GMG = BackgroundSubtractorGMG.Create();
    using var KNN = BackgroundSubtractorKNN.Create();
    using Mat frame = new Mat();
    using Mat MOG2remove = new Mat();
    using Mat MOGremove = new Mat();
    using Mat GMG2remove = new Mat();
    using Mat KNNremove = new Mat();
    Window win_MOG2 = new Window("MOG2");
    Window win_GMG = new Window("GMG");
    Window win_MOG = new Window("MOG");
    Window win_KNN = new Window("KNN");
    while (Cv2.WaitKey(1) < 0)
    {
        capture.Read(frame);
        MOG2.Apply(frame, MOG2remove);
        MOG.Apply(frame, MOGremove);
        GMG.Apply(frame, GMG2remove);
        KNN.Apply(frame, KNNremove);
        // Fixed: each window now shows the mask from its own algorithm.
        // The original cross-wired them (win_GMG showed the MOG mask and
        // win_MOG showed the GMG mask), making the comparison misleading.
        win_MOG2.ShowImage(MOG2remove);
        win_MOG.ShowImage(MOGremove);
        win_GMG.ShowImage(GMG2remove);
        win_KNN.ShowImage(KNNremove);
    }
}
// Background Subtraction From the Given Background and Input Image
// Pipeline: MOG2 subtraction -> erode -> dilate -> person bounding box ->
// Zhang-Suen thinning -> Hough transform. Intermediate stages are shown in
// debug windows and, when filepath is given and saveResults is set, written
// to outputFolder.
public void removebackground(string filepath = null)
{
    CvInvoke.Imshow("1- Background Image", bgImage);
    CvInvoke.Imshow("2- Forground Image", img);
    Image <Gray, byte> output = new Image <Gray, byte>(bgImage.Width, bgImage.Height);
    // varThreshold 100 keeps only strong foreground; shadows are ignored.
    BackgroundSubtractorMOG2 bgsubtractor = new BackgroundSubtractorMOG2(varThreshold: 100, shadowDetection: false);
    // First Apply seeds the model with the background frame; the second
    // classifies the foreground frame against it, leaving the mask in output.
    bgsubtractor.Apply(bgImage, output);
    bgsubtractor.Apply(img, output);
    pictureViewBox.Image = output;
    CvInvoke.Imshow("3- Background Subtracted", output);
    //output.Canny(100,100);
    // Erode once to drop speckle noise, then dilate 5x to solidify the body.
    CvInvoke.Erode(output, output, null, new System.Drawing.Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
    CvInvoke.Imshow("4- After Applying Erode", output);
    CvInvoke.Dilate(output, output, null, new System.Drawing.Point(-1, -1), 5, BorderType.Reflect, default(MCvScalar));
    CvInvoke.Imshow("5- After Dilation", output);
    // Write the Silhoutte output to the file
    if (filepath != null && saveResults == true)
    {
        CvInvoke.Imwrite(outputFolder + "bg_subtract_" + filepath, output);
    }
    // finding the Bounding Box of the Person
    frm = new PersonFrame();
    Rectangle rec = frm.findBoundry(output);
    // Using Thinning Algorithm on Silhoutte
    Image <Gray, byte> thinOutput = new Image <Gray, byte>(output.Width, output.Height);
    XImgprocInvoke.Thinning(output, thinOutput, ThinningTypes.ZhangSuen);
    // NOTE(review): Not().Not() is a double inversion - it amounts to a copy
    // (allocating two temporaries); confirm whether a single Not() was meant.
    pictureViewBox.Image = thinOutput.Not().Not();
    CvInvoke.Imshow("6- After Thinning Zhang Suen", thinOutput);
    // Write the thinned Image to the file
    if (filepath != null && saveResults == true)
    {
        CvInvoke.Imwrite(outputFolder + "thinned_" + filepath, thinOutput.Not().Not());
    }
    // drawing bounding Box of the person
    CvInvoke.Rectangle(thinOutput, rec, new Rgb(Color.White).MCvScalar, 2);
    CvInvoke.Imshow("Person Bounding Box", thinOutput);
    // drawing the middle line of the Person
    //CvInvoke.Line(thinOutput, frm.middle_line.p1, frm.middle_line.p2, new Rgb(Color.White).MCvScalar, 2);
    // Display the Image
    //CvInvoke.Imshow("Person Fame", thinOutput);
    // Applying Hough Line Transformation
    Hough(thinOutput, filepath);
    img.Dispose();
    output.Dispose();
    thinOutput.Dispose();
}
// Builds the side-camera form: acquires the camera, configures the HOG
// people detector, and prepares the motion and foreground detectors.
public frmSideCamera()
{
    InitializeComponent();
    getCamera();
    descriptor.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
    // Foreground mask via MOG2; coarse motion via two-frame differencing.
    fgDetector = new BackgroundSubtractorMOG2();
    motionDetector = new MotionDetector(
        new TwoFramesDifferenceDetector(),
        new MotionAreaHighlighting());
}
// Runs MOG2 background subtraction on a single still image and shows the
// input next to the resulting foreground mask.
private static void BackgroundSubtractorSample()
{
    // Fixed: subtractor and Mats were never disposed (native memory leak).
    using var mog = BackgroundSubtractorMOG2.Create();
    using var mask = new Mat();
    using var colorImage = Cv2.ImRead(@"data\shapes.png");
    // learningRate 0.01: blend the frame only slightly into the model.
    mog.Apply(colorImage, mask, 0.01);
    Window.ShowImages(colorImage, mask);
}
// Smoke test: applying MOG2 to a single frame must complete without throwing.
public void Apply()
{
    using var mog = BackgroundSubtractorMOG2.Create();
    using var src = Image("Data/Image/lenna.png");
    using var dst = new Mat();
    mog.Apply(src, dst);
}
// Prepares the mapping pipeline: MOG2 subtractor, blob detector, capture
// from the configured video file and the Kalman filter, then hooks frame
// processing onto the application's idle loop.
public void mapear()
{
    mDetector = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
    mBlobDetector = new CvBlobDetector();
    //_capture = new Capture();
    _capture = new Capture(mNomeDoArquivo);
    inicializarKalman();
    Application.Idle += ProcessFrame;
}
// Unity entry point: starts the webcam-to-Mat helper and creates the MOG2
// background subtractor with its default parameters.
void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Init();

    backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
    // Tuning knobs intentionally left at their defaults:
    // backgroundSubstractorMOG2.setHistory (2);
    // backgroundSubstractorMOG2.setVarThreshold (16);
    // backgroundSubstractorMOG2.setDetectShadows (true);
}
// Unity entry point: opens the first webcam, pre-allocates the texture and
// Mats that are reused every frame, and creates the MOG2 subtractor
// (history: 200, varThreshold: 16, shadow detection on).
void Start()
{
    webCamTexture = new WebCamTexture(WebCamTexture.devices[0].name);
    webCamTexture.Play();
    // Avoid using new keyword in Update(), esp. with Mat and Texture2D
    tex = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
    // NOTE(review): mat and fg alias the SAME Mat instance here - writes
    // through one are visible through the other. Confirm the sharing is
    // intentional and not a typo for two separate allocations.
    mat = fg = new Mat(webCamTexture.height, webCamTexture.width, MatType.CV_8UC4);
    nm = new Mat();
    mog2 = BackgroundSubtractorMOG2.Create(200, 16, true);
}
// Builds the form and the skin/hand tracking helpers: an adaptive skin
// detector, skin-colour ranges in HSV and YCrCb space, a MOG2 background
// subtractor (history 30, threshold 15, no shadows) and a moments holder.
public Form1()
{
    InitializeComponent();
    detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    // Skin-colour bounds.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    bgs = new BackgroundSubtractorMOG2(30, 15f, false);
    mv = new MCvMoments();
}
// Unity entry point: grabs helper components, enables the Android
// front-camera low-light workaround where it applies, then starts the
// webcam helper and creates the MOG2 background subtractor.
void Start()
{
    fpsMonitor = GetComponent<FpsMonitor>();
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some
    // Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();

    backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
    // Tuning knobs intentionally left at their defaults:
    // backgroundSubstractorMOG2.setHistory (2);
    // backgroundSubstractorMOG2.setVarThreshold (16);
    // backgroundSubstractorMOG2.setDetectShadows (true);
}
// Round-trips every MOG2 property through its own getter and setter to
// verify the native bindings neither throw nor corrupt the value.
public void CheckProperties()
{
    using (var mog = BackgroundSubtractorMOG2.Create())
    {
        mog.BackgroundRatio = mog.BackgroundRatio;
        mog.ComplexityReductionThreshold = mog.ComplexityReductionThreshold;
        mog.DetectShadows = mog.DetectShadows;
        mog.History = mog.History;
        mog.NMixtures = mog.NMixtures;
        mog.ShadowThreshold = mog.ShadowThreshold;
        mog.ShadowValue = mog.ShadowValue;
        mog.VarInit = mog.VarInit;
        mog.VarMax = mog.VarMax;
        mog.VarMin = mog.VarMin;
        mog.VarThreshold = mog.VarThreshold;
        // Fixed: was `mog.VarThresholdGen = mog.BackgroundRatio` (copy-paste
        // slip) which overwrote VarThresholdGen instead of round-tripping it.
        mog.VarThresholdGen = mog.VarThresholdGen;
    }
}
// Initializes the tracking state on top of the base-class setup: MOG2
// subtractor, blob detector, the working images sized from the current
// colour frame, a histogram window, and the per-person dictionaries.
protected override void inicializarVariaveis()
{
    base.inicializarVariaveis();
    mDetector = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
    mBlobDetector = new CvBlobDetector();
    Size vTamanhoDasImagens = mImagemColorida.Size;
    mImagemCinzaSemPlanoDeFundo = new Mat();
    // 32-bit float, 3-channel accumulator for the background image.
    mImagemDoPlanoDeFundo = new Mat(vTamanhoDasImagens.Width, vTamanhoDasImagens.Height, DepthType.Cv32F, 3);
    mImagemSemPlanoDeFundo = null;// = cvCreateImage(gTamanhoDaImagem, IPL_DEPTH_32F, 3);;
    mCopiaImagemPlanoDeFundo = null;
    mImagemBinariaSemPlanoDeFundo = new Mat();
    // Debug histogram window, shown immediately.
    vHist = new Emgu.CV.UI.HistogramBox();
    vHist.Show();
    vHist.Visible = true;
    mPrimeiraExecucao = true;
    // Trackers and blobs keyed by id.
    dicionarioMonitores = new Dictionary <int, MonitorDePessoa>();
    dicionarioBlobs = new Dictionary <int, MCvBlob>();
}
//test image from https://play.google.com/store/apps/details?id=com.vizalevgames.finddifferences200levels
//thank for that
// Finds the differences between two nearly identical images: feeding both
// through a MOG2 subtractor leaves exactly the changed pixels in the mask,
// which is then overlaid on both images and outlined with contours.
static void Main()
{
    var first = new Mat("1.jpg");
    var second = new Mat("2.jpg");

    //create background subtraction method
    var subtractor = BackgroundSubtractorMOG2.Create();
    var diffMask = new Mat();
    subtractor.Apply(first, diffMask);
    subtractor.Apply(second, diffMask);

    //reduce noise
    Cv2.MorphologyEx(diffMask, diffMask, MorphTypes.Open, null, null, 2);

    //convert mask from gray to BGR for AddWeighted function
    var maskBgr = new Mat();
    Cv2.CvtColor(diffMask, maskBgr, ColorConversionCodes.GRAY2BGR);

    //apply two image as one
    Cv2.AddWeighted(first, 1.0, maskBgr, 0.5, 2.2, first);
    Cv2.AddWeighted(second, 1.0, maskBgr, 0.5, 2.2, second);

    #region draw contours
    var edges = new Mat();
    Cv2.Canny(diffMask, edges, 15, 120);
    Cv2.FindContours(edges, out var contours, out var _, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
    Cv2.DrawContours(first, contours, -1, Scalar.Red, 2);
    Cv2.DrawContours(second, contours, -1, Scalar.Red, 2);
    #endregion

    using (new Window("org1", first))
    using (new Window("org2", second))
    using (new Window("mask", diffMask))
    {
        Cv2.WaitKey();
    }
}
//called from video player
// Resets the optical-flow / background-subtraction state for a new video:
// seeds the UI sliders (pushing each value through its change handler so UI
// and state agree), creates the MOG2 subtractor and the per-frame Mats,
// then resizes the output window to fit the texture.
public void initOpenCV(Texture2D texture)
{
    //Input01.text = "50.0";
    //Input01ValueChanged ();
    //maxSpeed = 20.0f;
    numBins = 24;
    Slider01.value = 200;
    slider1updated();
    Slider02.value = 0.1f;
    slider2updated();
    Slider03.value = 40;
    slider3updated();
    Slider04.value = 20.0f;
    slider4updated();
    backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
    // Working buffers reused by the optical-flow step each frame.
    matOpFlowThis = new Mat();
    matOpFlowPrev = new Mat();
    MOPcorners = new MatOfPoint();
    mMOP2fptsThis = new MatOfPoint2f();
    mMOP2fptsPrev = new MatOfPoint2f();
    mMOP2fptsSafe = new MatOfPoint2f();
    mMOBStatus = new MatOfByte();
    mMOFerr = new MatOfFloat();
    //resize video window
    Debug.Log("Screen width=" + Screen.width);
    Debug.Log("Screen height=" + Screen.height);
    Debug.Log("Texture width=" + texture.width);
    //outputImage.GetComponent<RectTransform> ().rect.width = Screen.width;
    //outputImage.GetComponent<RectTransform>().sizeDelta = new Vector2(texture.width, texture.height);
    // Display buffer allocated at 4x the source Mat's resolution.
    MatDisplayx2 = new Mat(openCVCreateMat.rgbMat.height() * 4, openCVCreateMat.rgbMat.width() * 4, CvType.CV_8UC4);
    ResizeWindow(texture);
}
// Sets up camera capture plus MOG2 foreground detection and blob tracking.
// The size divisors bound accepted detections: a LARGER divisor yields a
// SMALLER detection rectangle, so the "largest" divisors must not exceed
// the "smallest" ones.
public CameraTracking(int subtractionHistory, int subtractionThreshold, int frameBlurStrength, int largestDetectionHeightSizeDivisor, int largestDetectionWidthSizeDivisor, int smallestDetectionHeightSizeDivisor, int smallestDetectionWidthSizeDivisor)
{
    Debug.WriteLine("CameraTracking:: Initializing");
    if (largestDetectionHeightSizeDivisor > smallestDetectionHeightSizeDivisor || largestDetectionWidthSizeDivisor > smallestDetectionWidthSizeDivisor)
    {
        // Fixed typo: "then" -> "than".
        throw new Exception("The large detection divisors should be smaller than the smallest detection divisors!");
    }
    this.frameBlurStrength = frameBlurStrength;
    this.largestDetectionHeightSizeDivisor = largestDetectionHeightSizeDivisor;
    this.largestDetectionWidthSizeDivisor = largestDetectionWidthSizeDivisor;
    this.smallestDetectionHeightSizeDivisor = smallestDetectionHeightSizeDivisor;
    this.smallestDetectionWidthSizeDivisor = smallestDetectionWidthSizeDivisor;
    try
    {
        CameraTracking._cameraCapture = new VideoCapture();
        // I had to set this by hand to match our camera as opencv doesn't always pull these properties correctly
        // and sometimes shows funky frames or nothing at all
        // CameraTracking._cameraCapture.SetCaptureProperty(CapProp.FrameWidth, 1600);
        // CameraTracking._cameraCapture.SetCaptureProperty(CapProp.FrameHeight, 1200);
        // CameraTracking._cameraCapture.SetCaptureProperty(CapProp.FourCC, Emgu.CV.VideoWriter.Fourcc('Y', 'U', 'Y', '2'));
        CameraTracking._fgDetector = new Emgu.CV.BackgroundSubtractorMOG2(subtractionHistory, subtractionThreshold);
        CameraTracking._blobDetector = new CvBlobDetector();
        CameraTracking._tracker = new CvTracks();
        this.Ready = true;
        Debug.WriteLine("CameraTracking:: Camera Initialized");
    }
    catch (Exception e)
    {
        // Fixed: forward the original failure as the InnerException instead
        // of discarding it (the unused 'e' previously hid the real cause).
        throw new Exception("Unable to initialize the webcam!", e);
    }
}
// Produces a binary "dark area" mask from two frames using a 1-frame MOG2
// model: srcB seeds the background, src is classified against it, and the
// mask is cleaned with close/open morphology plus a final erode.
private void ProcImage3(ref System.Drawing.Bitmap src, ref System.Drawing.Bitmap srcB, out System.Drawing.Bitmap dst)
{
    dst = null;
    // Fixed: all intermediate Mats, the structuring element and the
    // subtractor are now disposed (they previously leaked native memory
    // on every call).
    using Mat srcImg = BitmapConverter.ToMat(src);
    Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);
    using Mat srcImgB = BitmapConverter.ToMat(srcB);
    Cv2.CvtColor(srcImgB, srcImgB, ColorConversionCodes.BGRA2BGR);
    using Mat mask = new Mat();
    double threshold = App.appSettings.DarkAreaThreshold;
    // history=1: the model consists of the single background frame.
    using BackgroundSubtractor backSub = BackgroundSubtractorMOG2.Create(1, threshold, true);
    //BackgroundSubtractor backSub = BackgroundSubtractorMOG.Create(1, 5, 0.7, 0);
    //BackgroundSubtractor backSub = BackgroundSubtractorGMG.Create(1, 0.5);
    // learningRate 1 fully adopts srcImgB as the background; learningRate 0
    // freezes the model while srcImg is classified against it.
    backSub.Apply(srcImgB, mask, 1);
    backSub.Apply(srcImg, mask, 0);
    Cv2.Threshold(mask, mask, 180, 255, ThresholdTypes.Binary);
    using var element = Cv2.GetStructuringElement(
        MorphShapes.Rect,
        new OpenCvSharp.Size(2 * 2 + 1, 2 * 2 + 1),
        new OpenCvSharp.Point(2, 2));
    using Mat tmp = new Mat();
    Cv2.MorphologyEx(mask, tmp, MorphTypes.Close, element, null, App.appSettings.Iterations);
    Cv2.MorphologyEx(tmp, mask, MorphTypes.Open, element, null, App.appSettings.Iterations2);
    Cv2.Erode(mask, tmp, element);
    dst = BitmapConverter.ToBitmap(tmp);
}
// Unity entry point for the tracking rig: selects the current camera and
// wakes its controller, optionally prepares video capture to disk, then
// sets up the MOG2 subtractor and the CamShift termination criteria.
void Start()
{
    Tracking.arrayGameObj = this.gameObject;
    Debug.Log("There are " + cameraArray.Count + " cameras");
    cam.Camera = CurrentCamera();
    if (CurrentCamera() == null)
    {
        throw new Exception("Error, no camera in the array");
    }
    GetControllerScript(CurrentCamera()).WakeUp();
    roiRect = null;
    if (saveVideo)
    {
        CreateVideoTargetDirectory(savesFolderName, frameRate);
    }
    backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
    // Stop CamShift after 10 iterations or when the shift drops below 1 px.
    terminationCriteria = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);
}
// Replaces the background subtractor with a fresh MOG2 model
// (history: 1000, varThreshold: 32, shadow detection on).
public void UpdateBackgroundSubtractor()
{
    // Fixed: release the previous subtractor's native resources before
    // dropping the reference (it previously leaked on every call).
    subtractor?.Dispose();
    subtractor = new BackgroundSubtractorMOG2(1000, 32, true);
}
// Unity entry point: initializes the webcam-to-Mat helper, then creates
// the MOG2 background subtractor with its default parameters.
void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Init();

    backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
    // Tuning knobs intentionally left at their defaults:
    // backgroundSubstractorMOG2.setHistory (2);
    // backgroundSubstractorMOG2.setVarThreshold (16);
    // backgroundSubstractorMOG2.setDetectShadows (true);
}
// Creates the operator with a default-configured MOG2 background subtractor.
public OcvOp()
{
    this.mog2 = BackgroundSubtractorMOG2.Create();
}
// Watches camera 0 for motion with MOG2: while the mean foreground-mask
// intensity stays above a threshold we are "in motion"; when it drops back,
// a fresh frame is rotated and handed to handle_motion against a remembered
// background. Exits on Esc or when quitEvent is signalled.
static void montion_detect(System.Threading.EventWaitHandle quitEvent = null)
{
    VideoCapture vc = new VideoCapture(0);
    if (vc.IsOpened)
    {
        bool b = vc.SetCaptureProperty(CapProp.Mode, 0);
        b = vc.SetCaptureProperty(CapProp.FrameHeight, 1944);
        b = vc.SetCaptureProperty(CapProp.FrameWidth, 2592);
        BackgroundSubtractorMOG2 bgs = new BackgroundSubtractorMOG2();
        bool monition = false;
        Image <Bgr, Byte> bg_img = null;
        while (true)
        {
            Mat cm = new Mat();
            vc.Read(cm);
            Mat mask = new Mat();
            bgs.Apply(cm, mask);
            Image <Gray, Byte> g = mask.ToImage <Gray, Byte>();
            Gray ga = g.GetAverage();
            // Mean mask intensity above 11 is treated as motion in progress.
            if (ga.MCvScalar.V0 > 11)
            {
                // montion
                if (!monition)
                {
                    Program.logIt("motion detected!");
                    Console.WriteLine("Detected montion.");
                    monition = true;
                    // Debounce: let the scene settle before re-sampling.
                    System.Threading.Thread.Sleep(500);
                }
            }
            else
            {
                // no montion
                if (monition)
                {
                    Program.logIt("motion stopped!");
                    Console.WriteLine("Montion stopped.");
                    monition = false;
                    // Grab a fresh, settled frame once motion has ended.
                    vc.Read(cm);
                    CvInvoke.Rotate(cm, cm, RotateFlags.Rotate90CounterClockwise);
                    if (bg_img == null)
                    {
                        bg_img = cm.ToImage <Bgr, Byte>();
                    }
                    if (!handle_motion(cm.ToImage <Bgr, Byte>(), bg_img))
                    {
                        //bg_img = cm.ToImage<Bgr, Byte>();
                    }
                }
            }
            // NOTE(review): per-frame Mats/Images are never disposed; the
            // explicit GC.Collect() appears to compensate for that.
            GC.Collect();
            if (System.Console.KeyAvailable)
            {
                ConsoleKeyInfo ki = Console.ReadKey();
                if (ki.Key == ConsoleKey.Escape)
                {
                    Program.logIt("Monitor will terminated by ESC pressed.");
                    break;
                }
            }
            if (quitEvent != null)
            {
                if (quitEvent.WaitOne(0))
                {
                    Program.logIt("Monitor will terminated by event set.");
                    break;
                }
            }
        }
    }
}
// Variant of montion_detect that measures motion via MeanStdDev of the MOG2
// mask, saves an initial snapshot, and routes settled frames (rotated -90)
// to handle_motion_V2, refreshing the reference background whenever the
// device is no longer in place. Exits on Esc.
static void montion_detect_1()
{
    VideoCapture vc = new VideoCapture(0);
    if (vc.IsOpened)
    {
        double db = vc.GetCaptureProperty(CapProp.Mode);
        //bool b = vc.SetCaptureProperty(CapProp.Mode, 1);
        bool b = vc.SetCaptureProperty(CapProp.Mode, 0);
        b = vc.SetCaptureProperty(CapProp.FrameHeight, 1944);
        b = vc.SetCaptureProperty(CapProp.FrameWidth, 2592);
        // One-off snapshot to confirm the camera delivers frames.
        if (vc.Grab())
        {
            Mat m = new Mat();
            if (vc.Retrieve(m))
            {
                m.Save("temp_1.jpg");
            }
        }
        //VideoWriter v1 = new VideoWriter("test_1.mp4", (int)vc.GetCaptureProperty(CapProp.Fps), new Size((int)vc.GetCaptureProperty(CapProp.FrameWidth), (int)vc.GetCaptureProperty(CapProp.FrameHeight)), true);
        //VideoWriter v2 = new VideoWriter("test_2.mp4", (int)vc.GetCaptureProperty(CapProp.Fps), new Size((int)vc.GetCaptureProperty(CapProp.FrameWidth), (int)vc.GetCaptureProperty(CapProp.FrameHeight)), true);
        BackgroundSubtractorMOG2 bgs = new BackgroundSubtractorMOG2();
        bool monition = false;
        Mat k = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(1, 1));
        Image <Gray, Byte> bg_img = null;
        int index = 1;
        Console.WriteLine("Camera is ready. 
Press Esc to exit.");
        bool device_in_place = false;
        while (true)
        {
            Mat cm = new Mat();
            vc.Read(cm);
            Mat mask = new Mat();
            bgs.Apply(cm, mask);
            //v1.Write(cm);
            //v2.Write(mask);
            //img = img.MorphologyEx(MorphOp.Erode, k, new Point(-1, -1), 3, BorderType.Default, new MCvScalar(0));
            //CvInvoke.MorphologyEx(mask, mask, MorphOp.Erode, k, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0));
            MCvScalar mean = new MCvScalar();
            MCvScalar stdDev = new MCvScalar();
            CvInvoke.MeanStdDev(mask, ref mean, ref stdDev);
            // Mean foreground intensity above 17 counts as motion in progress.
            if (mean.V0 > 17)
            {
                if (!monition)
                {
                    Program.logIt("motion detected!");
                    Console.WriteLine("Detected montion.");
                    monition = true;
                }
            }
            else
            {
                if (monition)
                {
                    Program.logIt("motion stopped!");
                    Console.WriteLine("Montion stopped.");
                    monition = false;
#if true
                    if (bg_img == null)
                    {
                        // First settled frame becomes the reference background.
                        bg_img = cm.ToImage <Gray, Byte>().Rotate(-90, new Gray(0), false);
                        //bg_img.Save("temp_bg.jpg");
                    }
                    else
                    {
                        device_in_place = handle_motion_V2(cm.ToImage <Bgr, Byte>().Rotate(-90, new Bgr(0, 0, 0), false), bg_img, index++);
                        if (!device_in_place)
                        {
                            // Device gone: re-capture the empty background.
                            bg_img = cm.ToImage <Gray, Byte>().Rotate(-90, new Gray(0), false);
                        }
                        //Rectangle r = new Rectangle(196, 665, 269, 628);
                        //// check needed.
                        //{
                        //    Image<Gray, Byte> img = cm.ToImage<Gray, Byte>().Rotate(-90, new Gray(0), false);
                        //    img.Save($"temp_{index}.jpg");
                        //    img = img.AbsDiff(bg_img);
                        //    if (img.GetAverage().MCvScalar.V0 > 10)
                        //    {
                        //    }
                        //    img.Save($"temp_{index}_diff.jpg");
                        //}
                        //{
                        //    Image<Bgr, Byte> img = cm.ToImage<Bgr, Byte>().Rotate(-90, new Bgr(0, 0, 0), false);
                        //    Image<Bgr, Byte> img1 = img.Copy(r);
                        //    img1.Save($"temp_{index}_1.jpg");
                        //}
                        //index++;
                    }
#else
                    if (!device_in_place)
                    {
                        bg_img = cm.ToImage <Gray, Byte>().Rotate(-90, new Gray(0), false);
                    }
                    device_in_place = handle_motion(cm.ToImage <Bgr, Byte>().Rotate(-90, new Bgr(0, 0, 0), false), bg_img, index++);
#endif
                }
            }
            // NOTE(review): per-frame Mats are never disposed; the explicit
            // GC.Collect() appears to compensate for that.
            GC.Collect();
            if (System.Console.KeyAvailable)
            {
                ConsoleKeyInfo ki = Console.ReadKey();
                if (ki.Key == ConsoleKey.Escape)
                {
                    break;
                }
            }
        }
    }
}
// Network-driven variant of montion_detect: polls a local frame service on
// port 6280 with "QueryFrame", loads the frame file named in the "ACK frame"
// reply from $FDHOME/AVIA/frames, and applies the same MOG2 mean-intensity
// motion heuristic. Exits on Esc, on quitEvent, or on any exception.
static void montion_detect_v2(System.Threading.EventWaitHandle quitEvent = null)
{
    TcpClient client = new TcpClient();
    try
    {
        string root = System.IO.Path.Combine(System.Environment.GetEnvironmentVariable("FDHOME"), "AVIA", "frames");
        Regex r = new Regex(@"^ACK frame (.+)\s*$", RegexOptions.IgnoreCase);
        client.Connect(IPAddress.Loopback, 6280);
        NetworkStream ns = client.GetStream();
        byte[] cmd = System.Text.Encoding.UTF8.GetBytes("QueryFrame\n");
        byte[] data = new byte[1024];
        BackgroundSubtractorMOG2 bgs = new BackgroundSubtractorMOG2();
        bool monition = false;
        Image <Bgr, Byte> bg_img = null;
        while (true)
        {
            // Poll the frame service roughly twice per second.
            System.Threading.Thread.Sleep(500);
            ns.Write(cmd, 0, cmd.Length);
            int read = ns.Read(data, 0, data.Length);
            string str = System.Text.Encoding.UTF8.GetString(data, 0, read);
            Match m = r.Match(str);
            if (m.Success)
            {
                Mat cm = CvInvoke.Imread(System.IO.Path.Combine(root, m.Groups[1].Value));
                Mat mask = new Mat();
                bgs.Apply(cm, mask);
                Image <Gray, Byte> g = mask.ToImage <Gray, Byte>();
                Gray ga = g.GetAverage();
                // Mean mask intensity above 11 is treated as motion in progress.
                if (ga.MCvScalar.V0 > 11)
                {
                    // montion
                    if (!monition)
                    {
                        Program.logIt("motion detected!");
                        Console.WriteLine("Detected montion.");
                        monition = true;
                        System.Threading.Thread.Sleep(500);
                    }
                }
                else
                {
                    // no montion
                    if (monition)
                    {
                        Program.logIt("motion stopped!");
                        Console.WriteLine("Montion stopped.");
                        monition = false;
                        CvInvoke.Rotate(cm, cm, RotateFlags.Rotate90CounterClockwise);
                        if (bg_img == null)
                        {
                            bg_img = cm.ToImage <Bgr, Byte>();
                        }
                        if (!handle_motion(cm.ToImage <Bgr, Byte>(), bg_img))
                        {
                            bg_img = cm.ToImage <Bgr, Byte>();
                        }
                    }
                }
                // NOTE(review): the exit checks below sit INSIDE the
                // if (m.Success) branch - a malformed reply skips them.
                GC.Collect();
                if (System.Console.KeyAvailable)
                {
                    ConsoleKeyInfo ki = Console.ReadKey();
                    if (ki.Key == ConsoleKey.Escape)
                    {
                        Program.logIt("Monitor will terminated by ESC pressed.");
                        break;
                    }
                }
                if (quitEvent != null)
                {
                    if (quitEvent.WaitOne(0))
                    {
                        Program.logIt("Monitor will terminated by event set.");
                        break;
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        Program.logIt(ex.Message);
        Program.logIt(ex.StackTrace);
    }
}
// Frame-tick handler driving the vehicle counter: grabs a frame, runs
// Gaussian blur + MOG2 + threshold + dilate/erode, then (when counting is
// enabled) finds contours and increments carcount whenever a blob centre
// crosses the counting line. Rewinds the video when it reaches the end.
private void Process(object sender, EventArgs e)
{
    lblCarCount.Text = carcount.ToString();
    // Lazily create the subtractor (history 250, varThreshold 25, shadows on).
    if (bs == null)
    {
        bs = new BackgroundSubtractorMOG2(250, 25, true);
    }
    if (videoCapture != null && isPlaying)
    {
        if (videoCapture.IsOpened)
        {
            imgOriginal = videoCapture.QueryFrame();
            if (imgOriginal != null)
            {
                imgResult = imgOriginal.Clone();
                using (Mat tempimg = imgOriginal.Clone())
                {
                    imgBS = new Mat();
                    // GaussianBlur requires an odd kernel size.
                    int gaussianBlurSize = tbGaussianBlur.Value % 2 == 0 ? tbGaussianBlur.Value + 1 : tbGaussianBlur.Value;
                    CvInvoke.GaussianBlur(tempimg, tempimg, new Size(gaussianBlurSize, gaussianBlurSize), 1.0);
                    bs.Apply(tempimg, imgBS);
                    CvInvoke.Threshold(imgBS, imgBS, tbThreshold.Value, 255, ThresholdType.Binary);
                    // UI-selectable morphology order: dilate-erode or erode-dilate.
                    switch (cbDilEroOrder.SelectedIndex)
                    {
                        case 0:
                            CvInvoke.Dilate(imgBS, imgBS, structuringElement, new Point(-1, -1), tbDilateIter.Value, BorderType.Default, new MCvScalar());
                            CvInvoke.Erode(imgBS, imgBS, structuringElement, new Point(-1, -1), tbErodeIter.Value, BorderType.Default, new MCvScalar());
                            break;
                        case 1:
                            CvInvoke.Erode(imgBS, imgBS, structuringElement, new Point(-1, -1), tbErodeIter.Value, BorderType.Default, new MCvScalar());
                            CvInvoke.Dilate(imgBS, imgBS, structuringElement, new Point(-1, -1), tbDilateIter.Value, BorderType.Default, new MCvScalar());
                            break;
                    }
                    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                    VectorOfVectorOfPoint filteredContours = new VectorOfVectorOfPoint();
                    // Counting lane rectangle comes from the UI sliders.
                    countingLane = new Rectangle(tbCountingLanePosX.Value, tbCountingLanePosY.Value, tbCountingLaneSizeWidth.Value, tbCountingLaneSizeHeight.Value);
                    if (cbxCounting.Checked)
                    {
                        CvInvoke.FindContours(imgBS, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
                        for (int i = 0; i < contours.Size; i++)
                        {
                            double area = CvInvoke.ContourArea(contours[i]);
                            // Ignore blobs smaller than the UI minimum size.
                            if (area > tbMinSize.Value)
                            {
                                Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
                                Point middlePoint = CenterOfRect(rect);
                                CvInvoke.Circle(imgResult, middlePoint, 1, SCALAR_BLUE, 2);
                                if (countingLane.Contains(middlePoint))
                                {
                                    CvInvoke.Rectangle(imgResult, rect, SCALAR_RED, 2);
                                    // Counting line sits on the lane's bottom edge
                                    // for incoming traffic, on the top edge otherwise.
                                    LineSegment2D line = new LineSegment2D();
                                    if (cbxIncoming.Checked)
                                    {
                                        line.P1 = new Point(countingLane.Left, countingLane.Bottom);
                                        line.P2 = new Point(countingLane.Right, countingLane.Bottom);
                                    }
                                    else
                                    {
                                        line.P1 = new Point(countingLane.Left, countingLane.Top);
                                        line.P2 = new Point(countingLane.Right, countingLane.Top);
                                    }
                                    CvInvoke.Line(imgResult, line.P1, line.P2, SCALAR_WHITE, 1);
                                    if (pointCrossLine(middlePoint, line))
                                    {
                                        carcount++;
                                        Console.WriteLine($"Car count: {carcount}");
                                        CvInvoke.Line(imgResult, line.P1, line.P2, SCALAR_YELLOW, 3);
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        // Counting disabled: just visualise the lane and its line.
                        CvInvoke.Rectangle(imgResult, countingLane, SCALAR_RED, 1);
                        if (cbxIncoming.Checked)
                        {
                            CvInvoke.Line(imgResult, new Point(countingLane.Left, countingLane.Bottom), new Point(countingLane.Right, countingLane.Bottom), SCALAR_YELLOW, 3);
                        }
                        else
                        {
                            CvInvoke.Line(imgResult, new Point(countingLane.Left, countingLane.Top), new Point(countingLane.Right, countingLane.Top), SCALAR_YELLOW, 3);
                        }
                    }
                    ibResult.Image = imgResult;
                    ibOriginal.Image = imgOriginal;
                    ibBs.Image = imgBS;
                }
            }
            else
            {
                // End of the video: rewind to the first frame and loop.
                videoCapture.SetCaptureProperty(CapProp.PosFrames, 0.0);
            }
        }
    }
}
// Mouse-selection helper kept commented out for reference: allowed drawing
// the CamShift target rectangle by click-and-drag.
/*void a()
 * {
 * UnityEngine.Rect screenRect = new UnityEngine.Rect(d, d, Screen.width - 2 * d, Screen.height - 2 * d);
 *
 * if (Input.GetMouseButtonDown(0) && (Input.mousePosition.x >= d) && (Input.mousePosition.y >= d) && (Input.mousePosition.x <= Screen.width - d) && (Input.mousePosition.y <= Screen.height - d))
 * {
 * Debug.Log("mouse");
 * Ray mouseRay = Camera.main.ScreenPointToRay(Input.mousePosition);
 * if (screenRect.Contains(Input.mousePosition)) giu = new Point(Input.mousePosition.x, cam.Camera.pixelRect.height - Input.mousePosition.y);
 * if (screenRect.Contains(Input.mousePosition)) giu2 = new Point(Input.mousePosition.x, Input.mousePosition.y);
 * isSelecting = true;
 * mousePosition1 = Input.mousePosition;
 * }
 * if (Input.GetMouseButtonUp(0) && (Input.mousePosition.x >= d) && (Input.mousePosition.y >= d) && (Input.mousePosition.x <= Screen.width - d) && (Input.mousePosition.y <= Screen.height - d))
 * {
 * Ray mouseRay = Camera.main.ScreenPointToRay(Input.mousePosition);
 * if (screenRect.Contains(Input.mousePosition)) su = new Point(Input.mousePosition.x, cam.Camera.pixelRect.height - Input.mousePosition.y);
 * if (screenRect.Contains(Input.mousePosition)) su2 = new Point(Input.mousePosition.x, Input.mousePosition.y);
 * isSelecting = false;
 *
 * Debug.Log("dimensioni rettangolo: " + giu + ", " + su);
 *
 * if (su == giu)
 * {
 * ss = 0;
 * roiRect = BgSub();
 * CamShiftOn = false;
 * shouldStartCamShift = true;
 * }
 * else
 * {
 * OpenCVForUnity.Rect rettangolo = Imgproc.boundingRect(new MatOfPoint(giu, su));
 * roiRect = rettangolo;
 * skipFrame = BackFrame; //skip the part of backgroungMOG2 and BgSub()
 * ss += ss;
 * }
 *
 * }
 * }*/

// Update is called once per frame.
// Per-frame state machine:
//  1. While CamShift is off and the camera is back at its start pose, feed
//     frames to a fresh MOG2 model for BackFrame frames, then let BgSub()
//     propose a region of interest.
//  2. Once a ROI exists, initialise CamShift on the following frame.
//  3. While CamShift runs, convert the frame to HSV, track the ROI, and
//     publish its centre/size for the camera controller; stop after Numiter
//     iterations or when the ROI grows past 2/3 of the screen height.
void Update()
{
    //if (Input.GetMouseButton(0))
    // Debug.Log(Input.mousePosition);
    dest = new OpenCVForUnity.Mat();
    output_ar = new List <OpenCVForUnity.Rect>();
    CurrentFrame();
    // A start request raised last frame switches CamShift on now.
    if (shouldStartCamShift)
    {
        shouldStartCamShift = false;
        CamShiftOn = true;
        Debug.Log("camshift = on");
    }
    bool backToStart = GetComponent <CameraControllerPTZ>().backToStart;
    Debug.Log("camera_in_position" + backToStart);
    if (!CamShiftOn && backToStart)
    {
        if (skipFrame < BackFrame)
        {
            // Rebuild the background model on the first warm-up frame.
            if (skipFrame == 0)
            {
                backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
            }
            backgroundSubstractorMOG2.apply(rgbMat, fgmaskMat);
            //roiRect = BgSub();
            skipFrame = skipFrame + 1;
        }
        else
        {
            if (ss == 0)
            {
                roiRect = BgSub();
            }
            //Debug.Log(roiRect);
            if (roiRect != null)
            {
                shouldStartCamShift = true;
                Debug.Log("true");
            }
        }
        SaveMatToFile("rgbMat." + ss, rgbMat);
        Debug.Log("backsubtract");
    }
    if (CamShiftOn)
    {
        //a(); // permette selezione del target con il mouse cliccando e trascinando per creare un rettangolo
        hsvMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
        Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);
        //SaveMatToFile3("hsvMat", hsvMat);
        RunCamShift();
        iter++;
        // Give up when tracking runs too long or the ROI grows implausibly tall.
        if (iter > Numiter || roiRect.height > 2 * (Screen.height / 3))
        {
            CamShiftOn = false;
            skipFrame = 0;
        }
        output_ar.Add(roiRect);
        color1 = new Color(0.1f, 0.9f, 0.1f, 0.1f);
        color2 = new Color(0.1f, 0.9f, 0.1f);
    }
    if (shouldStartCamShift)
    {
        Debug.Log("Starting camshift");
        hsvMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
        /*roiHSVMat = new Mat ();
         * SaveMatToFile ("roihsvmatpreRGB"+ss, dest);
         * Imgproc.cvtColor(roiHSVMat, roiHSVMat, Imgproc.COLOR_RGBA2RGB);
         * Imgproc.cvtColor(roiHSVMat, roiHSVMat, Imgproc.COLOR_RGB2HSV); //COLOR_RGB2HSV , COLOR_RGB2YCrCb*/
        Imgproc.cvtColor(dest, hsvMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV); //COLOR_RGB2HSV , COLOR_RGB2YCrCb
        iter = 0;
        //shouldStartCamShift = false;
        InitCamShift();
        //CamShiftOn = true;
        //Debug.Break();
    }
    if (!CamShiftOn)
    {
        // Not tracking: publish a zeroed ROI.
        x_coordinate = 0;
        y_coordinate = 0;
        height = 0;
        width = 0;
    }
    else
    {
        Debug.Log("passing coordinates to camera");
        height = (float)(roiRect.height);
        width = (float)(roiRect.width);
        // Publish the ROI centre for the PTZ camera controller.
        x_coordinate_for_camera = (float)(roiRect.x + (roiRect.width / 2));
        y_coordinate_for_camera = (float)(roiRect.y + (roiRect.height / 2));
        //Debug.Log("rgbMat.size()" + rgbMat.size());
        //Debug.Log ("x_coordinate_for_camera" + x_coordinate_for_camera);
        //Debug.Log ("y_coordinate_for_camera" + y_coordinate_for_camera);
        //Debug.Log("height" + height);
        SaveMatToFile("rgbMat" + ss, rgbMat);
        //Debug.Log("width" + width + " , screen" + Screen.width);
    }
    //skipFrame = skipFrame + 1;
}