Example #1
 public CameraControl()
 {
     this.InitializeComponent();
     mediaCapture       = new MediaCapture();
     emotionRecognition = new EmotionRecognition();
     faceIdentity       = new FaceIdentity();
     emotionScoresList  = new List <EmotionScore>();
     emotionColor       = new EmotionColor();
     InitializeCapture();
 }
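A minimal usage sketch, assuming CameraControl is a XAML UserControl hosted by some page; 'rootGrid' is a hypothetical container panel in that page:

 // Constructing the control wires up the capture, emotion, and face services
 // and kicks off InitializeCapture().
 CameraControl cameraControl = new CameraControl();
 rootGrid.Children.Add(cameraControl);   // 'rootGrid' is hypothetical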
Example #2
        String mainWindowTitle             = null;                                       // The title of the main window.

        /***
         *  Function: public InputSelection(bool train, bool disposal, String name, String title, String mwt, FaceIdentity fit)
         *  Parameter(s): bool train
         *      Whether or not to train the Machine (this is passed to the MainWindow.xaml.cs file).
         *                bool disposal
         *      Whether or not to enable resource disposal.
         *                String name
         *      The name of the person that the Machine will train itself to recognize (if training is enabled; should be "" if trainMachine is false).
         *                String title
         *      The title of the input selection window.
         *                String mwt
         *      The title of the main window (this is passed to the MainWindow.xaml.cs file).
         *                FaceIdentity fit
         *      The classification of the face to be examined during training (should be FaceIdentity.FaceNone if the Machine is not training; the classification to train against if the Machine is training).
         *  Return Value: N/A (Constructor)
         ***/
        public InputSelection(bool train, bool disposal, String name, String title, String mwt, FaceIdentity fit)
        {
            InitializeComponent();
            this.Title      = title;
            trainMachine    = train;
            faceIdentity    = fit;
            trainingName    = name;
            mainWindowTitle = mwt;
            if (!disposal)
            {
                resDisposal = false;

                // Completely disable the option (this is only applicable for training).
                resourceDisposal.IsEnabled = false;
            }
        }
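        A hedged usage sketch for this constructor; the literal argument values are hypothetical, and FaceIdentity.FaceAdmin is one of the classifications used elsewhere in this code:

        // Open the input-selection window in training mode for a hypothetical admin.
        InputSelection selection = new InputSelection(
            true,                    // train the Machine
            false,                   // resource disposal is disabled while training
            "Harold",                // name to train against (hypothetical)
            "Input Selection",       // this window's title (hypothetical)
            "The Machine",           // main window title (hypothetical)
            FaceIdentity.FaceAdmin); // classification being trained
        selection.Show();            // assumes InputSelection is a WPF Window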
        /***
            Function: public InputSelection(bool train, bool disposal, String name, String title, String mwt, FaceIdentity fit, MachineStartPage msp, UIKind uk)
            Parameter(s): bool train
                Whether or not to train the Machine (this is passed to the MainWindow.xaml.cs file).
                          bool disposal
                Whether or not to enable resource disposal.
                          String name
                The name of the person that the Machine will train itself to recognize (if training is enabled; should be "" if trainMachine is false).
                          String title
                The title of the input selection window.
                          String mwt
                The title of the main window (this is passed to the MainWindow.xaml.cs file).
                          FaceIdentity fit
                The classification of the face to be examined during training (should be FaceIdentity.FaceNone if the Machine is not training; the classification to train against if the Machine is training).
                          MachineStartPage msp
                A reference to the main window to control it.
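                          UIKind uk
                The UI variant (UIKind.UIMachine for the Machine asset set; any other value for the Samaritan asset set).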
            Return Value: N/A (Constructor)
        ***/
        public InputSelection(bool train, bool disposal, String name, String title, String mwt, FaceIdentity fit,
            MachineStartPage msp, UIKind uk)
        {
            InitializeComponent();
            machineStartPage = msp;
            this.Title = title;
            trainMachine = train;
            faceIdentity = fit;
            trainingName = name;
            mainWindowTitle = mwt;
            if (!disposal)
            {
                resDisposal = false;

                // Completely disable the option (this is only applicable for training).
                resourceDisposal.IsEnabled = false;
            }
            Closed += InputSelection_Closed;
            uikind = uk;
        }
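        And a sketch of this overload for normal (non-training) use; 'startPage' and the literal values are hypothetical:

        // Normal execution: no training, resource disposal left enabled.
        InputSelection selection = new InputSelection(
            false,                   // not training
            true,                    // allow resource disposal
            "",                      // no training name
            "Input Selection",       // this window's title (hypothetical)
            "The Machine",           // main window title (hypothetical)
            FaceIdentity.FaceNone,   // no training classification
            startPage,               // hypothetical MachineStartPage reference
            UIKind.UIMachine);       // use the Machine asset set
        selection.Show();            // assumes InputSelection is a WPF Window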
        /***
         *  Function: public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt, MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
         *  Parameter(s): bool train
         *      Whether or not the machine is supposed to be training itself.
         *                FaceIdentity fit
         *      The classification of the person that the Machine is training itself to recognize (supposed to be equal to FaceIdentity.FaceNone during
         *      normal execution).
         *                String nt
         *      The name of the person that the Machine is training itself to recognize (supposed to be equal to "" during normal execution).
         *                int cn
         *      The camera number to be used by the 'capture' variable.
         *                int fps
         *      The frame rate of the camera (not used when processing video files).
         *                bool crd
         *      Whether or not the Machine should dispose of resources (could potentially be unstable).
         *                int mdt
         *      The maxDistanceThreshold to use when classifying faces.
         *                 MachineInputSourceKind misk
         *      The input source from which the Machine will gather its input.
         *                 String t
         *      The title of this window.
         *                  MachineStartPage msp
         *      A reference to the main machine window to control.
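         *                  UIKind uk
         *      The UI variant to use (UIKind.UIMachine selects the Machine asset set; any other value selects the Samaritan asset set).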
         *  Return Value: N/A (Constructor)
         ***/
        public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt,
                          MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
        {
            machineStartPage       = msp;
            cameraResourceDisposal = crd;
            maxDistanceThreshold   = mdt;
            cameraNumber           = cn;
            cameraFrameRate        = fps;
            machineInputSourceKind = misk;
            // Live speech recognition is only relevant when capturing from a webcam;
            // leave it offline for file/none input sources and while training.
            if (misk == MachineInputSourceKind.SourceFile || misk == MachineInputSourceKind.SourceNone || train)
            {
                speechRecognitionOnline = false;
            }
            else
            {
                speechRecognitionOnline = true;
            }
            userSpeech = "";
            FileUtilities.DirectoryCreation();
            FileUtilities.TrainingDirectoryCreation();
            faceClassifier = new CascadeClassifier("assets\\haarcascade_frontalface_alt.xml");
            uikind         = uk;
            if (uikind == UIKind.UIMachine)
            {
                admin_focus     = new Bitmap("assets\\machine\\admin_focus.jpg");
                secondary_focus = new Bitmap("assets\\machine\\secondary_focus.jpg");
                threat_focus    = new Bitmap("assets\\machine\\threat_focus.jpg");
            }
            else
            {
                admin_focus     = new Bitmap("assets\\samaritan\\deviant_focus.jpg");
                secondary_focus = new Bitmap("assets\\samaritan\\irrelevant_focus.jpg");
                threat_focus    = new Bitmap("assets\\samaritan\\threat_focus.jpg");
            }
            machineIsTraining = train;
            //Check if the CORE_IMAGE_DATA.dat file exists and read the predefined width and height.
            if (File.Exists(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData))
            {
                String[] lines = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData);
                if (lines.Length != 0)
                {
                    String[] heightWidth = lines[0].Split('|');
                    facialRecognitionHeight = Int32.Parse(heightWidth[0]);
                    facialRecognitionWidth  = Int32.Parse(heightWidth[1]);
                }
            }
            FileUtilities.CoreImageDataCreation();
            // Check if the Machine is being trained or not.
            if (!train && fit == FaceIdentity.FaceNone && nt == "")
            {
                String[]                  lines2           = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData);
                List <FaceIdentity>       faceIdentityList = new List <FaceIdentity>();
                List <String>             nList            = new List <String>();
                List <LBPHFaceRecognizer> lbphList         = new List <LBPHFaceRecognizer>();
                // Load the trained neural networks, list of names, and classifications.
                foreach (String line in lines2)
                {
                    String[] innerSplit = line.Split('^');
                    String   name       = innerSplit[0];
                    String   identifier = innerSplit[1];
                    String   file       = innerSplit[2];
                    if (identifier == adminIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAdmin);
                    }
                    else if (identifier == auxAdminIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAuxAdmin);
                    }
                    else if (identifier == assetIdentifier)
                    {
                        faceIdentityList.Add(FaceIdentity.FaceAsset);
                    }
                    else
                    {
                        PanicAndTerminateProgram();
                    }
                    nList.Add(name);
                    LBPHFaceRecognizer lbph = new LBPHFaceRecognizer();
                    lbph.Load(file);
                    lbphList.Add(lbph);
                }
                focusKindDatabase      = faceIdentityList.ToArray();
                nameDatabase           = nList.ToArray();
                lbphFaceRecognizerList = lbphList.ToArray();
                // Check to make sure the ANN, name, and classification database lengths are all equal.
                if (focusKindDatabase.Length != nameDatabase.Length || nameDatabase.Length != lbphFaceRecognizerList.Length)
                {
                    PanicAndTerminateProgram();
                }
            }
            faceIdentityTraining = fit;
            nameTraining         = nt;
            InitializeComponent();
            this.Title = t;

            Closed += (object sender, EventArgs args) =>
            {
                Environment.Exit(1);
            };
        }
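        A hedged sketch of constructing this window for a normal webcam run; 'startPage' and all literal values are hypothetical:

        // Non-training run against camera 0 at 30 FPS with a distance threshold of 80.
        MainWindow window = new MainWindow(
            false, FaceIdentity.FaceNone, "",    // not training
            0, 30,                               // camera number and frame rate (hypothetical)
            true,                                // dispose of camera resources
            80,                                  // maxDistanceThreshold (hypothetical)
            MachineInputSourceKind.SourceWebcam, // live webcam input
            "The Machine",                       // window title (hypothetical)
            startPage,                           // hypothetical MachineStartPage reference
            UIKind.UIMachine);                   // Machine asset set
        window.Show();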
Example #5
        /***
            Function: public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt, MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
            Parameter(s): bool train
                Whether or not the machine is supposed to be training itself.
                          FaceIdentity fit
                The classification of the person that the Machine is training itself to recognize (supposed to be equal to FaceIdentity.FaceNone during
                normal execution).
                          String nt
                The name of the person that the Machine is training itself to recognize (supposed to be equal to "" during normal execution).
                          int cn
                The camera number to be used by the 'capture' variable.
                          int fps
                The frame rate of the camera (not used when processing video files).
                          bool crd
                Whether or not the Machine should dispose of resources (could potentially be unstable).
                          int mdt
                The maxDistanceThreshold to use when classifying faces.
                           MachineInputSourceKind misk
                The input source from which the Machine will gather its input.
                           String t
                The title of this window.
                            MachineStartPage msp
                A reference to the main machine window to control.
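                            UIKind uk
                The UI variant to use (UIKind.UIMachine selects the Machine asset set; any other value selects the Samaritan asset set).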
            Return Value: N/A (Constructor)
        ***/
        public MainWindow(bool train, FaceIdentity fit, String nt, int cn, int fps, bool crd, int mdt,
            MachineInputSourceKind misk, String t, MachineStartPage msp, UIKind uk)
        {
            machineStartPage = msp;
            cameraResourceDisposal = crd;
            maxDistanceThreshold = mdt;
            cameraNumber = cn;
            cameraFrameRate = fps;
            machineInputSourceKind = misk;
            if (misk == MachineInputSourceKind.SourceWebcam) {
                speechRecognitionOnline = true;
                userSpeech = "";
            }
            FileUtilities.DirectoryCreation();
            FileUtilities.TrainingDirectoryCreation();
            faceClassifier = new CascadeClassifier("assets\\haarcascade_frontalface_alt.xml");
            uikind = uk;
            if (uikind == UIKind.UIMachine) {
                admin_focus = new Bitmap("assets\\machine\\admin_focus.jpg");
                secondary_focus = new Bitmap("assets\\machine\\secondary_focus.jpg");
                threat_focus = new Bitmap("assets\\machine\\threat_focus.jpg");
            }
            else
            {
                admin_focus = new Bitmap("assets\\samaritan\\deviant_focus.jpg");
                secondary_focus = new Bitmap("assets\\samaritan\\irrelevant_focus.jpg");
                threat_focus = new Bitmap("assets\\samaritan\\threat_focus.jpg");
            }
            machineIsTraining = train;
            //Check if the CORE_IMAGE_DATA.dat file exists and read the predefined width and height.
            if (File.Exists(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData))
            {
                String[] lines = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData);
                if (lines.Length != 0)
                {
                    String[] heightWidth = lines[0].Split('|');
                    facialRecognitionHeight = Int32.Parse(heightWidth[0]);
                    facialRecognitionWidth = Int32.Parse(heightWidth[1]);
                }
            }
            FileUtilities.CoreImageDataCreation();
            // Check if the Machine is being trained or not.
            if (!train && fit == FaceIdentity.FaceNone && nt == "")
            {
                String[] lines2 = File.ReadAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData);
                List<FaceIdentity> faceIdentityList = new List<FaceIdentity>();
                List<String> nList = new List<String>();
                List<LBPHFaceRecognizer> lbphList = new List<LBPHFaceRecognizer>();
                // Load the trained neural networks, list of names, and classifications.
                foreach (String line in lines2)
                {
                    String[] innerSplit = line.Split('^');
                    String name = innerSplit[0];
                    String identifier = innerSplit[1];
                    String file = innerSplit[2];
                    if (identifier == adminIdentifier)
                        faceIdentityList.Add(FaceIdentity.FaceAdmin);
                    else if (identifier == auxAdminIdentifier)
                        faceIdentityList.Add(FaceIdentity.FaceAuxAdmin);
                    else if (identifier == assetIdentifier)
                        faceIdentityList.Add(FaceIdentity.FaceAsset);
                    else
                        PanicAndTerminateProgram();
                    nList.Add(name);
                    LBPHFaceRecognizer lbph = new LBPHFaceRecognizer();
                    lbph.Load(file);
                    lbphList.Add(lbph);
                }
                focusKindDatabase = faceIdentityList.ToArray();
                nameDatabase = nList.ToArray();
                lbphFaceRecognizerList = lbphList.ToArray();
                // Check to make sure the ANN, name, and classification database lengths are all equal.
                if (focusKindDatabase.Length != nameDatabase.Length || nameDatabase.Length != lbphFaceRecognizerList.Length)
                    PanicAndTerminateProgram();
            }
            faceIdentityTraining = fit;
            nameTraining = nt;
            InitializeComponent();
            this.Title = t;

            Closed += (object sender, EventArgs args) =>
            {
                Environment.Exit(1);
            };
        }
        /***
         *  Function: private void TrainMachine(FaceIdentity faceIdentity, String name)
         *  Parameter(s): FaceIdentity faceIdentity
         *      The privilege classification of the face being trained; it is recorded in the ASSET_INDEX.dat file.
         *                String name
         *      The name of the individual the Machine is being trained to recognize; it is written to ASSET_INDEX.dat but is not
         *      otherwise used yet, leaving room for future customization.
         *  Return Value: void
         ***/
        private void TrainMachine(FaceIdentity faceIdentity, String name)
        {
            // Notify the user that training has begun.
            MessageBox.Show(trainingBeginning, this.Title, MessageBoxButton.OK);

            String[]   fileList = Directory.GetFiles(FileUtilities.TrainingDirectoryName);
            List <Mat> matList  = new List <Mat>();

            foreach (String file in fileList)
            {
                matList.Add(new Mat(file, Emgu.CV.CvEnum.LoadImageType.Unchanged));
            }
            List <Image <Gray, Byte> > list = new List <Image <Gray, Byte> >();

            // Detect each face in each image.
            foreach (Mat mat in matList)
            {
                Image <Gray, Byte> image    = mat.ToImage <Gray, Byte>().Resize(1 / (double)scale_factor, Emgu.CV.CvEnum.Inter.Cubic);
                Rectangle[]        faceList = faceClassifier.DetectMultiScale(image);
                foreach (Rectangle rect in faceList)
                {
                    list.Add(image.Copy(rect).Convert <Gray, Byte>());
                }
            }

            // Make sure that there is at least one face to train.
            if (list.Count() == 0)
            {
                PanicAndTerminateProgram();
            }

            // If a height and width already exist in the CORE_IMAGE_DATA.dat file, resize to those dimensions so future training stays consistent.
            int height = facialRecognitionHeight == 0 ? list[0].Height * scale_factor : facialRecognitionHeight;
            int width  = facialRecognitionWidth == 0 ? list[0].Width * scale_factor : facialRecognitionWidth;

            if (facialRecognitionHeight == 0 || facialRecognitionWidth == 0)
            {
                List <String> lines = new List <String>();
                lines.Add(height + "|" + width);
                File.WriteAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.CoreImageData, lines.ToArray());
            }
            List <Image <Gray, Byte> > listFinal = new List <Image <Gray, Byte> >();

            foreach (Image <Gray, Byte> image in list)
            {
                listFinal.Add(image.Resize(width, height, Emgu.CV.CvEnum.Inter.Cubic));
            }
            List <int> labelList = new List <int>();
            int        integer   = 0;
            String     prefix    = "";
            String     ident     = "";

            if (faceIdentity == FaceIdentity.FaceAdmin)
            {
                integer = 0;
                prefix  = FileUtilities.AdminTrainedPrefix;
                ident   = adminIdentifier;
            }
            else if (faceIdentity == FaceIdentity.FaceAsset)
            {
                integer = 2;
                prefix  = FileUtilities.AssetTrainedPrefix;
                ident   = assetIdentifier;      // matches the identifier mapped to FaceAsset when loading
            }
            else if (faceIdentity == FaceIdentity.FaceAuxAdmin)
            {
                integer = 1;
                prefix  = FileUtilities.AuxAdminTrainedPrefix;
                ident   = auxAdminIdentifier;   // matches the identifier mapped to FaceAuxAdmin when loading
            }
            else
            {
                PanicAndTerminateProgram();
            }
            for (int i = 0; i < list.Count(); i++)
            {
                labelList.Add(integer);
            }

            // Train the machine and write its trained state to a file.
            LBPHFaceRecognizer lbphFaceRecognizer = new LBPHFaceRecognizer();

            lbphFaceRecognizer.Train <Gray, Byte>(listFinal.ToArray(), labelList.ToArray());
            Directory.Delete(FileUtilities.TrainingDirectoryName, true);
            String temp  = categories[integer];
            String fname = FileUtilities.DirectoryName + "\\" + prefix + temp.ToUpper().Replace(' ', '_') + FileUtilities.FileExtension;

            lbphFaceRecognizer.Save(fname);

            // Write everything to the ASSET_INDEX.dat file.
            FileUtilities.TrainingDirectoryDeletion();
            List <String> aboutTraining = new List <String>();

            aboutTraining.Add(name + "^" + ident + "^" + fname);
            File.AppendAllLines(FileUtilities.DirectoryName + "\\" + FileUtilities.AssetIndexData, aboutTraining.ToArray());

            // Notify the user that training has ended.
            MessageBox.Show(trainingEnded, this.Title, MessageBoxButton.OK);
        }
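        For context, a hedged sketch of how a recognizer saved by TrainMachine might later be loaded and applied to a detected face, using the same Emgu CV calls that appear above; 'savedFileName', 'faceImage', and the threshold comparison are assumptions, not code from this project:

        // Load a previously trained recognizer (the file name would come from ASSET_INDEX.dat).
        LBPHFaceRecognizer recognizer = new LBPHFaceRecognizer();
        recognizer.Load(savedFileName);                       // 'savedFileName' is hypothetical

        // 'faceImage' stands for an Image<Gray, Byte> cropped by the cascade classifier and
        // resized to facialRecognitionWidth x facialRecognitionHeight, mirroring training.
        FaceRecognizer.PredictionResult result = recognizer.Predict(faceImage);

        // Smaller distances mean closer matches; maxDistanceThreshold plays this role elsewhere.
        if (result.Distance <= maxDistanceThreshold)
        {
            // result.Label maps back to the integer label assigned in TrainMachine (0, 1, or 2).
        }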