/
FrmMain.cs
126 lines (105 loc) · 4.78 KB
/
FrmMain.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.IO;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.Structure;
using System.Collections;
using FaceDetection;
using Emgu.CV.Util;
using Emgu.CV.CvEnum;
using System.Windows;
namespace Project_FaceRecognition
{
public partial class FrmMain : Form
{
    // Webcam capture device; opened in frmMain_Load, released in FrmMain_FormClosed.
    private Capture _capture;

    // NOTE(review): never initialized anywhere in this file — the commented-out
    // face-detection code that used it would have thrown NullReferenceException.
    // Kept because this is a partial class and another part may assign it.
    private CascadeClassifier _cascadeClassifier;

    // NOTE(review): unused in this file; kept for partial-class compatibility.
    private bool _hasRecognizedFace;

    // True while the most recent frame pair produced at least one motion contour.
    bool motionDetected;

    // Master switch for the motion-detection branch of ProcessFrame.
    bool trackingEnabled;

    public FrmMain()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Form load: opens the default capture device, grabs an initial frame so
    /// ProcessFrame always has a "previous" image to diff against, and hooks
    /// frame processing onto the UI idle loop.
    /// </summary>
    private void frmMain_Load(object sender, EventArgs e)
    {
        trackingEnabled = true;
        _capture = new Emgu.CV.Capture();
        imgCamUser.Image = _capture.QueryFrame();
        Application.Idle += new EventHandler(ProcessFrame);
    }

    /// <summary>
    /// Runs on every UI idle tick: grabs a new frame, computes the grayscale
    /// absolute difference against the previously displayed frame, thresholds
    /// it into a binary motion mask, and reports motion when the mask contains
    /// at least one contour.
    /// </summary>
    private void ProcessFrame(Object sender, EventArgs args)
    {
        try
        {
            int Threshold = 60; // per-pixel gray-level change required to count as movement

            // "Previous" frame is whatever is currently shown in the preview box.
            Image<Bgr, Byte> Previous_Frame = new Image<Bgr, Byte>(imgCamUser.Image.Bitmap);
            imgCamUser.Image = _capture.QueryFrame();
            Image<Bgr, Byte> Frame = new Image<Bgr, Byte>(imgCamUser.Image.Bitmap);

            // BUG FIX: the original called Convert<Gray, Byte>() on both frames and
            // discarded the results (Convert returns a NEW image; it does not mutate),
            // so the diff actually ran on color frames with a per-channel Bgr
            // threshold. Convert to grayscale and use the converted images.
            Image<Gray, Byte> grayPrevious = Previous_Frame.Convert<Gray, Byte>();
            Image<Gray, Byte> grayFrame = Frame.Convert<Gray, Byte>();

            // Absolute per-pixel difference; pixels that changed by more than
            // Threshold become white (255), everything else black.
            Image<Gray, Byte> Difference = grayPrevious.AbsDiff(grayFrame);
            Image<Gray, Byte> thresholdImage = Difference.ThresholdBinary(new Gray(Threshold), new Gray(255));
            picCapturedUser.Image = thresholdImage.Copy();

            if (trackingEnabled)
            {
                // Outline the changed regions; any external contour means motion.
                // using-blocks dispose the native containers the original leaked.
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                using (Mat hier = new Mat())
                {
                    CvInvoke.FindContours(thresholdImage, contours, hier,
                        Emgu.CV.CvEnum.RetrType.External,
                        Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

                    motionDetected = contours.Size > 0;
                    if (motionDetected)
                    {
                        label2.Text = contours.Size.ToString();
                        label1.Text = "motion detected";
                    }
                    else
                    {
                        label1.Text = "Idle";
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // Transient failures are expected here (e.g. the first tick before the
            // camera delivers a frame, or a frame arriving while the form closes).
            // Log instead of silently swallowing so real failures stay visible.
            System.Diagnostics.Debug.WriteLine(ex);
        }
    }

    /// <summary>
    /// Form closed: detach the idle handler BEFORE releasing the capture device
    /// (the original disposed first, so ProcessFrame could still fire against a
    /// disposed capture), then stop and dispose the camera.
    /// </summary>
    private void FrmMain_FormClosed(object sender, FormClosedEventArgs e)
    {
        Application.Idle -= ProcessFrame;
        if (_capture != null)
        {
            _capture.Stop();
            _capture.Dispose();
        }
    }
}
}