コード例 #1
0
        /// <summary>
        /// Runs the trained model over a test data set and returns the prediction result.
        /// </summary>
        /// <param name="testingSet">ARFF data set to classify; must not be null.</param>
        public PredictionResult Test(IArffDataSet testingSet)
        {
            // Validate the argument before building the SVM problem.
            Guard.NotNull(() => testingSet, testingSet);

            // Translate the ARFF data into an SVM problem and predict with the
            // already-trained model (no probability estimates).
            var source = problemFactory.Construct(testingSet);
            return Prediction.Predict(source.GetProblem(), trainedModel, false);
        }
コード例 #2
0
        /// <summary>
        /// Trains a multiclass SVM on a synthetic problem and returns the
        /// prediction score on a freshly generated test problem.
        /// </summary>
        /// <param name="numberOfClasses">Number of distinct class labels.</param>
        /// <param name="count">Number of samples to generate.</param>
        /// <param name="svm">SVM variant to train.</param>
        /// <param name="kernel">Kernel function to use.</param>
        /// <param name="probability">Whether to train probability estimates.</param>
        /// <param name="outputFile">Optional file to write predictions to.</param>
        private double testMulticlassModel(int numberOfClasses, int count, SvmType svm, KernelType kernel, bool probability = false, string outputFile = null)
        {
            // Build the training problem and derive the scaling transform from it.
            Problem rawTraining = SVMUtilities.CreateMulticlassProblem(numberOfClasses, count);
            RangeTransform transform = RangeTransform.Compute(rawTraining);
            Problem scaledTraining = transform.Scale(rawTraining);

            // Configure the SVM for this run.
            Parameter param = new Parameter
            {
                Gamma       = 1.0 / 3,
                SvmType     = svm,
                KernelType  = kernel,
                Probability = probability,
            };

            // C-SVC requires an explicit (here: uniform) weight per class.
            if (svm == SvmType.C_SVC)
            {
                for (int label = 0; label < numberOfClasses; label++)
                {
                    param.Weights[label] = 1;
                }
            }

            Model model = Training.Train(scaledTraining, param);

            // Scale the held-out test problem with the SAME transform as training.
            Problem testProblem = SVMUtilities.CreateMulticlassProblem(numberOfClasses, count, false);
            return Prediction.Predict(transform.Scale(testProblem), outputFile, model, false);
        }
コード例 #3
0
        /// <summary>
        /// Trains on the training sample CSV, classifies the validation sample,
        /// and prints the accuracy and elapsed time.
        /// </summary>
        static void Main(string[] args)
        {
            // Time the whole train/validate cycle.
            Stopwatch sw = new Stopwatch();
            sw.Start();

            var csvHelper = new CSVFormatter();

            // Load and parse the training and validation samples.
            var trainingArray = File.ReadAllLines("ExcelFiles/trainingsample.csv");
            var recordsList   = csvHelper.FormatToRecordList(trainingArray);

            var validationArray       = File.ReadAllLines("ExcelFiles/validationsample.csv");
            var validationrecordsList = csvHelper.FormatToRecordList(validationArray);

            // Classify every validation record against the training records.
            var results = Prediction.Predict(recordsList, validationrecordsList);

            // Materialize once: the original called results.ToArray() in the loop
            // condition, allocating a new array on every iteration.
            var resultArray = results.ToArray();

            int accuracy = 0;
            for (var i = 0; i < resultArray.Length; i++)
            {
                if (resultArray[i].Number.Equals(validationrecordsList[i].Number))
                {
                    accuracy++;
                }
            }

            Console.WriteLine($"Accuracy is {accuracy} out of {validationArray.Length}");
            sw.Stop();
            Console.WriteLine(sw.Elapsed);
            Console.ReadLine();
        }
コード例 #4
0
    //PREDICTION
    /// <summary>
    /// Starts interpolating this object toward a predicted target pose.
    /// </summary>
    /// <param name="position">Last reported network position.</param>
    /// <param name="rotation">Last reported network rotation.</param>
    public void SetTransform(Vector3 position, Quaternion rotation)
    {
        // The current pose is the interpolation origin.
        startRot = transform.rotation;
        startPos = transform.position;

        // Ask the predictor for the destination pose, speed, and duration.
        interpolationTime = predition.Predict(position, rotation, out endPos, out endRot, out speed);

        // Restart the interpolation clock and enable per-frame updates.
        time = 0;
        updateTransform = true;
    }
コード例 #5
0
    /// <summary>
    /// Receives a networked pose update, predicts the target pose, and drives
    /// the movement animation from the travel direction.
    /// </summary>
    public void ReceiveTransform(Vector3 pos, Quaternion rot) //USED BY MOVEMENTMANAGER
    {
        // The current pose becomes the interpolation origin.
        startRot = myTransform.rotation;
        startPos = myTransform.position;

        // Predict the destination pose and movement speed.
        interpolationTime = prediction.Predict(pos, rot, out endPos, out endRot, out speed);

        // Animate based on the normalized direction of travel.
        Vector3 direction = speed.normalized;
        animController.Animation(direction.x, direction.z);

        // Restart interpolation.
        time = 0;
    }
コード例 #6
0
    /// <summary>
    /// Trains an SVM on the pre-extracted OCR training vectors and writes the
    /// predicted label(s) for the input feature file to the results file.
    /// </summary>
    public void Classify()
    {
        Problem train = Problem.Read(@"e:\ocr\ProcessedImages\train.txt");
        Problem test  = Problem.Read(@"e:\ocr\input\feature.txt");

        // NOTE(review): C/Gamma are hard-coded; presumably from a prior grid
        // search — confirm before changing. (Removed two unused locals that
        // shadowed these names.)
        Parameter param = new Parameter();
        param.C     = 64;
        param.Gamma = 16;

        Model model = Training.Train(train, param);

        // Predict without probability estimates.
        Prediction.Predict(test, @"E:\ocr\results.txt", model, false);
    }
コード例 #7
0
ファイル: Detector.cs プロジェクト: thigiacmaytinh/IPSSedu
        ////////////////////////////////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Uses the trained SVM model to recognize a character from its binary
        /// feature string; returns the predicted class label.
        /// </summary>
        /// <param name="binaryStringSVM">Feature vector in libsvm text format (without label).</param>
        /// <param name="model">Trained SVM model.</param>
        int PredictSVM(String binaryStringSVM, Model model)
        {
            // The file-based libsvm API needs a problem file: write the features
            // with a dummy label ("0"), predict into the same file, then read
            // the predicted label back.
            string temp = Path.GetTempFileName();
            try
            {
                File.WriteAllText(temp, "0 " + binaryStringSVM);

                Problem test = Problem.Read(temp);
                Prediction.Predict(test, temp, model, false);

                return int.Parse(File.ReadAllText(temp).Trim());
            }
            finally
            {
                // GetTempFileName creates the file on disk; the original leaked
                // one temp file per call.
                File.Delete(temp);
            }
        }
コード例 #8
0
        /// <summary>
        /// Menu handler: trains an SVM on D:\AI.txt and writes predictions for
        /// D:\test.txt to D:\result.txt.
        /// </summary>
        private void sVMToolStripMenuItem_Click(object sender, EventArgs e)
        {
            Problem train = Problem.Read(@"D:\AI.txt");
            Problem test  = Problem.Read(@"D:\test.txt");

            // Hard-coded hyper-parameters (presumably tuned offline — confirm).
            // Removed two unused locals (C, Gamma) that were never read.
            Parameter parameters = new Parameter();
            parameters.C     = 32;
            parameters.Gamma = 8;

            Model model = Training.Train(train, parameters);
            Prediction.Predict(test, @"D:\result.txt", model, false);
        }
コード例 #9
0
        /// <summary>
        /// Lets the user pick an image, extracts its HOEF feature vector,
        /// classifies it with the trained SVM model, and shows the letter.
        /// </summary>
        private void button3_Click(object sender, EventArgs e)
        {
            OpenFileDialog openDialog = new OpenFileDialog();

            // Guard clause instead of nesting the whole body in the dialog check.
            if (openDialog.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            Bitmap img = new Bitmap(openDialog.FileName);
            pictureBox1.Image = img;
            img = preProcess((Bitmap)img.Clone());

            HOEF    hoefObj       = new HOEF();
            float[] featureVector = hoefObj.Apply(img);

            List <float[]> features = new List <float[]>();
            features.Add(featureVector);

            // Write the feature vector in libsvm format. The original leaked the
            // streams on any exception; sw is passed by ref so it cannot be a
            // 'using' variable — use try/finally instead.
            StreamWriter sw = new StreamWriter(
                new FileStream("Test", FileMode.Create, FileAccess.Write));
            try
            {
                WriteToFile(features, 50, ref sw);
                sw.Flush();
            }
            finally
            {
                sw.Close(); // also closes the underlying FileStream
            }

            Problem test = Problem.Read("Test");
            Prediction.Predict(test, "result", model, false);

            // Read back the predicted label; 'using' guarantees the reader (and
            // its underlying stream) are closed even on a parse failure later.
            string result;
            using (StreamReader sr = new StreamReader(
                       new FileStream("result", FileMode.Open, FileAccess.Read)))
            {
                result = sr.ReadLine();
            }

            int iResult = Int32.Parse(result);

            // NOTE(review): assumes the model only ever predicts labels 0..3;
            // anything else throws IndexOutOfRangeException — confirm.
            char[] lookupTable = { 'A', 'B', 'C', 'D' };

            label3.Text = lookupTable[iResult].ToString();
        }
コード例 #10
0
ファイル: SVMTester.cs プロジェクト: dtbinh/SVNBackup
        /// <summary>
        /// Randomly samples the data set (with replacement), predicts each
        /// sample with the trained model, and prints the success rate.
        /// </summary>
        public void Test()
        {
            int datasetSize = _dataMgr.count;
            int dimension   = _dataMgr.inputNum;

            Node[][] inputs = new Node[datasetSize][];
            int[]    labels = new int[datasetSize];

            // Convert each randomly chosen sample into libsvm nodes with a
            // {-1, +1} label derived from a 0.5 threshold.
            for (int sample = 0; sample < datasetSize; sample++)
            {
                int index = _rnd.Next(datasetSize - 1);
                double[] input = _dataMgr.GetInputData(index);

                inputs[sample] = new Node[dimension];
                for (int d = 0; d < dimension; d++)
                {
                    Node node = new Node();
                    node.Index = d;
                    node.Value = input[d];
                    inputs[sample][d] = node;
                }

                labels[sample] = _dataMgr.GetLabelData(index) < 0.5 ? -1 : 1;
            }

            // Count how often the model's prediction matches the label.
            int successCnt = 0;
            for (int sample = 0; sample < datasetSize; sample++)
            {
                double value = Prediction.Predict(_model, inputs[sample]);
                if ((int)value == labels[sample])
                {
                    successCnt++;
                }
                Console.WriteLine("compare " + labels[sample] + " and " + value);
            }

            Console.WriteLine("success rate:" + (double)successCnt / (double)datasetSize);
        }
コード例 #11
0
    /// <summary>
    /// Receives a networked pose update, predicts the target pose, and feeds
    /// the travel direction to the animation controller.
    /// </summary>
    public void ReceiveTransform(Vector3 pos, Quaternion rot) //USED BY MOVEMENTMANAGER
    {
        // The current pose becomes the interpolation origin.
        startRot = transform.rotation;
        startPos = transform.position;

        // Predict the destination pose and movement speed vector.
        interpolationTime = prediction.Predict(pos, rot, out endPos, out endRot, out speed);

        // Animate from the normalized direction of travel.
        Vector3 direction = speed.normalized;
        animController.Animation(direction.x, direction.z);

        // Restart the interpolation clock.
        time = 0;
    }
コード例 #12
0
        /// <summary>
        /// Forecasts a value for the given input vector using the trained SVM
        /// model; returns 0 when no model or input is available.
        /// </summary>
        /// <param name="vector">Raw feature values, or null.</param>
        public override double Forecast(double[] vector)
        {
            // Without a trained model or an input there is nothing to predict.
            if (TrainedModel == null || vector == null)
            {
                return 0;
            }

            // Convert the raw values into libsvm nodes with 1-based indices.
            Node[] nodes = new Node[vector.Length];
            for (int i = 0; i < vector.Length; i++)
            {
                Node node = new Node();
                node.Index = i + 1;
                node.Value = vector[i];
                nodes[i] = node;
            }

            // Apply the stored range scaling before predicting.
            return Prediction.Predict(TrainedModel, mRange.Transform(nodes));
        }
コード例 #13
0
    /// <summary>
    /// Receives a networked pose update for this enemy, predicts its target
    /// pose, and drives the movement animation.
    /// </summary>
    public void ReceiveTransform(Vector3 pos, Quaternion rot)
    {
        Debug.Log("Enemy: " + this.GetComponent <GameNetworkObject>().NetworkId + "Position is: " + pos);

        // Interpolation starts from the current pose.
        startRot = transform.rotation;
        startPos = transform.position;

        // Predict the destination pose and speed.
        interpolationTime = prediction.Predict(pos, rot, out endPos, out endRot, out speed);

        // Animate based on the normalized direction of travel.
        Vector3 direction = speed.normalized;
        animController.Animation(direction.x, direction.z);

        time = 0;
    }
コード例 #14
0
        /// <summary>
        /// Trains an SVM regressor on a synthetic problem and returns the
        /// prediction score on a freshly generated test problem.
        /// </summary>
        /// <param name="count">Number of samples to generate.</param>
        /// <param name="svm">SVM variant to train.</param>
        /// <param name="kernel">Kernel function to use.</param>
        /// <param name="outputFile">Optional file to write predictions to.</param>
        private double testRegressionModel(int count, SvmType svm, KernelType kernel, string outputFile = null)
        {
            // Create the training problem and derive the scaling transform.
            Problem rawTraining = SVMUtilities.CreateRegressionProblem(count);
            RangeTransform transform = RangeTransform.Compute(rawTraining);
            Problem scaledTraining = transform.Scale(rawTraining);

            // Configure the SVM for this regression run.
            Parameter param = new Parameter
            {
                Gamma      = 1.0 / 2,
                SvmType    = svm,
                KernelType = kernel,
                Degree     = 2,
            };

            Model model = Training.Train(scaledTraining, param);

            // Scale the held-out test problem with the SAME transform as training.
            Problem testProblem = SVMUtilities.CreateRegressionProblem(count, false);
            return Prediction.Predict(transform.Scale(testProblem), outputFile, model, false);
        }
コード例 #15
0
        /// <summary>
        /// Runs n generations of the genetic algorithm — select, cross over,
        /// mutate, evaluate, preserve elite — then predicts with the best seed.
        /// </summary>
        /// <param name="n">Number of generations to run.</param>
        private void InnerLoop(int n)
        {
            List <Seed> newGeneration = initialSeeds;

            for (int generation = 0; generation < n; generation++)
            {
                List <Seed> selectedSeeds = selection.Select(newGeneration);
                // Report the average fitness of the selected parents.
                Console.WriteLine(selectedSeeds.Average(seed => seed.GetSSE()));

                // Pair adjacent parents; each pair contributes two children.
                for (int i = 0; i + 1 < selectedSeeds.Count; i += 2)
                {
                    Tuple <Seed, Seed> children = crossover.Crossover(selectedSeeds[i], selectedSeeds[i + 1]);
                    currentSeeds.Add(children.Item1);
                    currentSeeds.Add(children.Item2);
                }

                // Mutate every child in place, then score each one.
                foreach (Seed child in currentSeeds)
                {
                    mutation.Mutate(child);
                }
                foreach (Seed child in currentSeeds)
                {
                    child.SetSSE(fitnessEvaluator.EvaluateFitness(
                        fitnessEvaluator.GeneratePredictions(child)));
                }

                // Elitism decides who survives into the next generation.
                newGeneration = elitism.Preserve(currentSeeds);
                currentSeeds  = new List <Seed>();
            }

            // Target series used to evaluate the best surviving seed.
            List <double> list = new List <double>()
            {
                1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1
            };

            Prediction prediction = new Prediction(elitism.GetBestSeed(), list);
            prediction.Predict();
        }
コード例 #16
0
ファイル: Form1.cs プロジェクト: aucan/IronicSA
        /// <summary>
        /// Trains an SVM on trainSet and evaluates it on testSet, writing the
        /// predictions (with probability estimates) to resultFile.
        /// </summary>
        /// <returns>The prediction score reported by the library.</returns>
        private double TrainAndTest(string trainSet, string testSet, string resultFile)
        {
            Problem train = Problem.Read(trainSet);
            Problem test  = Problem.Read(testSet);

            // The UI checkbox switches between classification and regression,
            // each with its own tuned hyper-parameters.
            Parameter parameters = new Parameter();
            if (chClassification.Checked)
            {
                // C-SVC classification settings.
                parameters.SvmType = SvmType.C_SVC;
                parameters.C       = 0.03;
                parameters.Gamma   = 0.008;
            }
            else
            {
                // Epsilon-SVR regression settings.
                parameters.SvmType = SvmType.EPSILON_SVR;
                parameters.C       = 8;
                parameters.Gamma   = 0.063;
                parameters.P       = 0.5;
            }

            Model model = Training.Train(train, parameters);
            return Prediction.Predict(test, resultFile, model, true);
        }
コード例 #17
0
        /// <summary>
        /// Trains a two-class SVM on a synthetic problem and returns the
        /// prediction score on a freshly generated test problem.
        /// </summary>
        /// <param name="count">Number of samples to generate.</param>
        /// <param name="svm">SVM variant to train.</param>
        /// <param name="kernel">Kernel function to use.</param>
        /// <param name="probability">Whether to train probability estimates.</param>
        /// <param name="outputFile">Optional file to write predictions to.</param>
        private double testTwoClassModel(int count, SvmType svm, KernelType kernel, bool probability = false, string outputFile = null)
        {
            // Build the training problem and derive the scaling transform.
            Problem rawTraining = SVMUtilities.CreateTwoClassProblem(count);
            RangeTransform transform = RangeTransform.Compute(rawTraining);
            Problem scaledTraining = transform.Scale(rawTraining);

            // Configure the SVM for this run.
            Parameter param = new Parameter
            {
                Gamma       = .5,
                SvmType     = svm,
                KernelType  = kernel,
                Probability = probability,
            };

            // C-SVC needs explicit per-label weights; both classes weighted equally.
            if (svm == SvmType.C_SVC)
            {
                param.Weights[-1] = 1;
                param.Weights[1]  = 1;
            }

            Model model = Training.Train(scaledTraining, param);

            // Scale the held-out test problem with the SAME transform as training.
            Problem testProblem = SVMUtilities.CreateTwoClassProblem(count, false);
            return Prediction.Predict(transform.Scale(testProblem), outputFile, model, false);
        }
コード例 #18
0
    //Function checks if the game has not ended when the mouse is down along with if the player ball is touching floor and if so will freeze the gameobject before it will fire it
    private void Update()
    {
        // Start aiming when the mouse goes down, the ball is near the floor,
        // and the game is still running.
        if (lM.GetDistance() <= 1.2f && Input.GetMouseButtonDown(0) && lM.gameState == 0)
        {
            lM.NowFiring(false);
            if (currentProjectile <= projectiles.Length - 1)
            {
                // Power multiplier per projectile type, indexed by
                // Projectile.GetProjectileType():
                //  0 BluePotion, 1 Brick, 2 Bullet, 3 CannonBall, 4 Cone,
                //  5 GreenPotion, 6 HolyGrenade, 7 MagmaBall, 8 PurplePotion,
                //  9 RedPotion, 10 Skull, 11 Wing.
                // Replaces the original 12-case switch with a lookup table.
                float[] powerMultipliers =
                {
                    6f, 4f, 11f, 4.25f, 9f, 6f, 6.5f, 3.5f, 6f, 6f, 7f, 10f
                };

                int type = projectiles[currentProjectile].GetComponent <Projectile>().GetProjectileType();
                // Matches the original switch: an unknown type leaves
                // temporaryPower unchanged.
                if (type >= 0 && type < powerMultipliers.Length)
                {
                    temporaryPower = defaultPower * powerMultipliers[type];
                }

                //Turn on the update frame mouse direction
                preparing = true;
            }
        }
        if (preparing)
        {
            //Get camera's negative z axis pos (or distance from camera to our target)
            float z = -Camera.main.transform.position.z;
            //Screen to world point converts our X and Y screen position into world space 'Z' distance away from our camera
            //As our camera is -z from 0, this will end up on the XY plane (or where Z is equal to 0)
            Vector3 mousePositionInWorldSpace = Camera.main.ScreenToWorldPoint(new Vector3(Input.mousePosition.x, Input.mousePosition.y, z));
            //Vector from the mouse position (in world space) to the player
            Vector3 differenceDistance = transform.position - mousePositionInWorldSpace;
            //Draw the aiming line
            Debug.DrawLine(mousePositionInWorldSpace, transform.position, Color.white);
            distance = Vector3.Distance(Camera.main.ScreenToWorldPoint(Input.mousePosition), transform.position);
            if (distance < 1.18f && distance > 1f)
            {
                // Inside the sweet-spot band: use the raw aim vector.
                velocity      = differenceDistance * temporaryPower;
                priorVelocity = velocity;
            }
            else
            {
                // Outside the band: damp the velocity the further the mouse is
                // from the sweet spot.
                priorVelocity = differenceDistance * temporaryPower;
                velocity      = Vector3.Lerp(transform.position, priorVelocity, 0.9f - (distance * 0.1f));
            }

            if (Input.GetMouseButtonUp(0) && cannon == null)
            {
                // Mouse released: fire with the computed velocity.
                preparing = false;
                Fire(velocity);
                bTouchedStageFloor = false;
            }
            else if (cannon == null || currentProjectile == 0)
            {
                // Still aiming: show the predicted trajectory, if available.
                if (prediction != null /*&& lM.enablePrediction*/)
                {
                    prediction.Predict(velocity);
                }

                // Orient the ball along the predicted path's first segment.
                if (GetComponent <LineRenderer>().positionCount >= 1)
                {
                    transform.right = GetComponent <LineRenderer>().GetPosition(1) - transform.position;
                }
            }
        }
    }
コード例 #19
0
ファイル: Program.cs プロジェクト: AymanNabih/SVM-Repository
        static void Main(string[] args)
        {
            SpamDSClass        sds = new SpamDSClass();
            FireFlyAlgorithm   ff  = new FireFlyAlgorithm();
            ParameterSelection ps  = new ParameterSelection();
            //reading the phishing emails and the test Datas
            string testFileFolderName = "TrainTestDataSpam";
            string trainTestDataPath  = String.Format(Environment.CurrentDirectory + "\\{0}", testFileFolderName); //filepath for the training and test dataset

            string[] trainTestFileURL = System.IO.Directory.GetFileSystemEntries(trainTestDataPath);

            string extractedVectorsFolderName = "ExtractedFeatures";
            string extractedVectorsFilePath   = String.Format(Environment.CurrentDirectory + "\\{0}", extractedVectorsFolderName);

            string[] evFileURL = System.IO.Directory.GetFileSystemEntries(extractedVectorsFilePath);

            string   outputEvaluationFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", "OutputEvaluations.txt");
            string   outpuVectorFilePath      = String.Format(Environment.CurrentDirectory + "\\{0}", "OutputVectors.txt");
            PDSClass pds = new PDSClass();

            //string SVAccuracyFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", "SVAccuracy.txt");


            /**
             * //extract and format spam words
             * string spamWordsFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", "StopWords.txt");
             * sds.ExtractFormatSpamWords(spamWordsFilePath);
             **/

            /**
             * //Extract UCI dataset
             * string UCIInputFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", "SpamDataset.txt");
             * sds.ExtractFormatUCIDataset(UCIInputFilePath);
             * string UCIOutputFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", "SpamDatasetOutput.txt");
             * sds.ExtractVectorUCIDataset(UCIOutputFilePath);
             ***/

            /**
             * //Extract UCI Card dataset
             * string outputFileName = "ZooOutput.txt";
             * string CreditCardInputFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", "Zoo.txt");
             * sds.ExtractFormatUCIDataset(CreditCardInputFilePath, outputFileName); //extract, format and write vectors to file
             * string UCIOutputFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", outputFileName);
             * sds.ExtractVectorUCIDataset(UCIOutputFilePath, outputFileName);
             ***/

            /**
             * //extract and format credit card dataset
             * string outputFileName = "CreditCardDSOutput.txt";
             * //string CreditCardInputFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", "CreditCardDS.txt");
             * //sds.ExtractFormatUCIDataset(CreditCardInputFilePath, outputFileName); //extract, format and write vectors to file
             * string OutputFilePath = String.Format(Environment.CurrentDirectory + "\\{0}", outputFileName);
             * sds.ExtractVectorCreditCard(OutputFilePath, outputFileName);
             * //sds.ReadVectorFromFile(OutputFilePath); //read vector values from file
             **/

            File.WriteAllText(ps.SVAccuracyFilePath, string.Empty);
            File.WriteAllText(outputEvaluationFilePath, string.Empty); //deleting the contents of the file holding the results for new results
            File.WriteAllText(ps.filePath, string.Empty);              //deleting the contents of the file holding the extracted C,Gamma and CV values
            File.WriteAllText(sds.extractedFeaturesFilePathTrainDS, string.Empty);
            File.WriteAllText(ps.filePath2, string.Empty);
            File.WriteAllText(ps.filePath3, string.Empty);
            //File.WriteAllText(outpuVectorFilePath, string.Empty); //deleting the contents of the file holding the results for new results

            //int NumofFeatures = 17;
            double DatasetEntropy = 0.0;
            int    TotalMails     = 0;

            //string[] features = sds.Features(); //saving the features in the string array

            int[,] HamSpamCount = new int[, ] {
            };                                    //count for the total number of ham and phish emails

            string[] testTrainFolderFiles = new string[] { };
            string   folderFiles;

            //double[] informationGain = new double[NumofFeatures];
            //Dictionary<string, double> feat_infoGain = sds.Feature_InfoGain(features, informationGain); //saving the information gain for all the individual features

            Dictionary <string, double> feat_infoGain = new Dictionary <string, double>(); //saving the information gain for all the individual features
            double   classficatnAccuracySum           = 0.0;                               //cummulative classification accuracy
            double   classficatnAccuracy = 0;                                              //classification accuracy for each iteration
            DateTime start = DateTime.Now;
            double   totalPhishing = 0, totalHam = 0; int mailGrandTotal = 0;

            //initializing all the variables that will used for evaluating the performance of the classifier
            double FP = 0.0, FN = 0.0, TP = 0.0, TN = 0.0, P = 0.0, F_M = 0.0, sumFP = 0.0, sumF_M = 0.0, sumFN = 0.0, sumTP = 0.0, sumTN = 0.0, sumP = 0.0, sumTime = 0.0;
            double storagePercentage = 0.0, sumStoragePercentage = 0.0;

            int N_Fold = 10; //number of folds
            int n_Runs = 1;  //number of runs

            double[,] NormalizedVector = new double[, ] {
            };                                              //normalized vector values for each features

            Program p = new Program();
            double  avgRuns = 0.0, avgFP = 0.0, avgFN = 0.0, avgR = 0.0, avgPr = 0.0, avgFM = 0.0, avgTime = 0.0, avgStoragePercentage = 0.0; //avg=> average
            double  globalBest = double.MinValue;

            //change the boolean value appropriately to choose the task you want to perform (either vector value extraction or email classification)
            //Note: Both values must not be true! This is to reduce processing time
            bool extractVectorValues = false; //switch for extract
            bool Emailclassification = true;  //switch for classification

            double C     = new double();
            double Gamma = new double();

            //double[] CV = new double[2];
            List <double>  CV1        = new List <double>();                                         //Save the CV accuracy, C and Gamma for comparison
            List <double>  CV2        = new List <double>();                                         //Save the CV accuracy, C and Gamma for comparison
            List <int[, ]> vectorList = new List <int[, ]>();                                        //save the list of vectors for each fold
            List <Dictionary <string, int> > trainMailList = new List <Dictionary <string, int> >(); //save training mails for each fold
            List <Dictionary <string, int> > testMailList  = new List <Dictionary <string, int> >(); //save test emails for each fold
            List <string[]> featList      = new List <string[]>();
            int             NumofFeatures = 0;

            int[,] vector = new int[, ] {
            };
            Dictionary <string, int> trainMail_Class = new Dictionary <string, int>(); //variable containing the emails and classes of all the training emails
            Dictionary <string, int> testMail_Class  = new Dictionary <string, int>(); //variable containing the emails and classes of all the test emails

            string[] features = new string[] { };
            List <Dictionary <string, double> > feat_infoGainList = new List <Dictionary <string, double> >();

            //for (int aa = 0; aa < n_Runs; aa++)
            //{

            //    classficatnAccuracySum = 0.0;
            //    sumFP = 0.0;
            //    sumTP = 0.0; //Recall
            //    sumFN = 0.0;
            //    sumF_M = 0.0;
            //    sumP = 0.0;
            //    sumTime = 0.0;
            //for (int a = 0; a < N_Fold; a++)
            //{
            if (extractVectorValues == true) //if the value of ExtractVectorValues is true, only extract email vector values and dont perform classification
            {
                for (int a = 0; a < N_Fold; a++)
                {
                    n_Runs = 1;                                // change number of runs from its default value (i.e 10) to 1 (to avoid repeating the extraction process 10 times) since we wanna do is to extract the vector values from each emails
                    string[] trainFileURLs = new string[] { }; //urls for the train emails (i.e. the emails used for training the classifier)
                    string[] testFileURLs  = new string[] { }; //urls for the test emails (i.e. the emails used for testing the classifier)
                    trainMail_Class = new Dictionary <string, int>();
                    testMail_Class  = new Dictionary <string, int>();
                    string[] trainMailFileNames = new string[trainMail_Class.Count]; //the file names for all the emails in the training dataset
                    string[] testMailFileNames = new string[] { };                   //the file names for all the emails in the test dataset
                    string   trainMailLabel; int spamCount = 0, hamCount = 0; double phishPercentage, hamPercentage;
                    features = new string[] { };

                    //processing the training dataset for the each fold
                    for (int i = 0; i < trainTestFileURL.Length; i++)
                    {
                        if (i.Equals(a))
                        {
                            continue;                                                                //skipping one email folder, which is to be used for testing the trained classifier (i.e. the current test dataset)
                        }
                        testTrainFolderFiles = System.IO.Directory.GetFiles(trainTestFileURL[i]);    //getting the filenames of all the emails in the training dataset
                        trainFileURLs        = trainFileURLs.Concat(testTrainFolderFiles).ToArray(); //get all the urls for the test emails
                        trainMailFileNames   = sds.getFileNames(trainFileURLs);                      //get the file names for all the test mails
                        for (int j = 0; j < testTrainFolderFiles.Length; j++)                        //processing all the emails in the current training dataset for classification
                        {
                            trainMailLabel = trainMailFileNames[j].Substring(0, 2);                  //getting the label for each email, HM(for Ham Mails) and PM(for Phish Mails)
                            //folderFiles = File.ReadAllText(testTrainFolderFiles[j]); //extracting the content of each email in each email folder
                            //trainMail_Class[sds.ProcessMails(folderFiles)] = (trainMailLabel.Equals("SM")) ? 1 : 0; //processing each email and assigning label to the emails based on the folders each emails come from.
                            trainMail_Class[testTrainFolderFiles[j]] = (trainMailLabel.Equals("SM")) ? 1 : 0; //processing each email and assigning label to the emails based on the folders each emails come from.
                            if (trainMail_Class.ElementAt(j).Value == 1)
                            {
                                spamCount++; //counting the total number of ham and phishing to get their percentage
                            }
                            else
                            {
                                hamCount++;
                            }
                        }
                    }

                    //processing the test dataset for each fold
                    for (int i = a; i < a + 1; i++)
                    {
                        testTrainFolderFiles = System.IO.Directory.GetFiles(trainTestFileURL[i]);
                        testFileURLs         = testFileURLs.Concat(testTrainFolderFiles).ToArray();
                        testMailFileNames    = sds.getFileNames(testFileURLs);
                        for (int j = 0; j < testTrainFolderFiles.Length; j++)
                        {
                            trainMailLabel = testMailFileNames[j].Substring(0, 2);
                            //folderFiles = File.ReadAllText(testTrainFolderFiles[j]);
                            //testMail_Class[sds.ProcessMails(folderFiles)] = (trainMailLabel.Equals("SM")) ? 1 : 0; //processing each email and assigning label to the emails based on the folders each emails come from.
                            testMail_Class[testTrainFolderFiles[j]] = (trainMailLabel.Equals("SM")) ? 1 : 0; //processing each email and assigning label to the emails based on the folders each emails come from.
                            if (testMail_Class.ElementAt(j).Value == 1)
                            {
                                spamCount++;
                            }
                            else
                            {
                                hamCount++;
                            }
                        }
                    }

                    //calculating the percentage of spam and ham email in the dataset
                    phishPercentage = (double)spamCount / (double)(trainMail_Class.Count + testMail_Class.Count);
                    hamPercentage   = (double)hamCount / (double)(trainMail_Class.Count + testMail_Class.Count);
                    mailGrandTotal  = spamCount + hamCount;
                    totalHam        = hamCount; totalPhishing = spamCount;


                    //Information Gain

                    features      = pds.Features();
                    NumofFeatures = features.Count();
                    TotalMails    = trainMail_Class.Count;
                    vector        = new int[TotalMails, NumofFeatures];
                    double[] informationGainn = new double[NumofFeatures];
                    pds.processVector(vector, trainMail_Class, features, trainFileURLs, NumofFeatures);                 //extracting the vector values of all the features
                    int[,] HamPhishCount = new int[NumofFeatures, 4];
                    pds.FeatureVectorSum(NumofFeatures, TotalMails, vector, trainMail_Class, HamPhishCount);            // calculating the total number of zeros and ones for both phishing and ham emails
                    DatasetEntropy = pds.Entropy(trainMail_Class);                                                      //calculating the entropy for the entire dataset
                    pds.CalInformationGain(NumofFeatures, HamPhishCount, informationGainn, TotalMails, DatasetEntropy); //calculating information gain for each feature
                    feat_infoGain = pds.Feature_InfoGain(features, informationGainn);                                   //assisgning the calculated information gain to each feature

                    //process vector for training Dataset
                    int      NumofFeatures2 = NumofFeatures - 9;
                    string[] newFeatures    = new string[NumofFeatures2];
                    for (int i = 0; i < NumofFeatures2; i++)
                    {
                        newFeatures[i] = feat_infoGain.ElementAt(i).Key; //copying the best 8 features with the highest information gain
                    }

                    vector = new int[TotalMails, NumofFeatures2];
                    pds.processVector(vector, trainMail_Class, newFeatures, trainFileURLs, NumofFeatures2);

                    //extract vectors of the training data
                    pds.extractVectors(vector, trainMail_Class, NumofFeatures2, "trainingDS", a);

                    //process vector for testing Dataset
                    TotalMails = testMail_Class.Count;
                    vector     = new int[TotalMails, NumofFeatures2];
                    pds.processVector(vector, testMail_Class, newFeatures, testFileURLs, NumofFeatures2);
                    NormalizedVector = ff.Normalize(vector); //normalize the all vector values for test data

                    //extract vectors of the test data
                    pds.extractVectors(vector, testMail_Class, NumofFeatures2, "testDS", a);

                    /***
                     * vector = sds.processTrainVector(trainMail_Class, ref features); //extracting the vector values of all the features
                     *
                     * TotalMails = trainMail_Class.Count;
                     * NumofFeatures = vector.GetLength(1);
                     * double[] informationGain = new double[NumofFeatures];
                     * HamSpamCount = new int[NumofFeatures, 4];
                     * sds.FeatureVectorSum(NumofFeatures, TotalMails, vector, trainMail_Class, HamSpamCount); // calculating the total number of zeros and ones for both phishing and ham emails
                     * DatasetEntropy = sds.Entropy(trainMail_Class, NumofFeatures); //calculating the entropy for the entire dataset
                     *
                     * sds.CalInformationGain(NumofFeatures, HamSpamCount, informationGain, TotalMails, DatasetEntropy);//calculating information gain for each feature
                     * feat_infoGain = sds.Feature_InfoGain(features, informationGain, NumofFeatures); //assisgning the calculated information gain to each feature
                     *
                     * //extract features with information gain greater than zero
                     * List<string> topFeat = new List<string>();
                     * foreach (KeyValuePair<string, double> feat in feat_infoGain)
                     * {
                     *  if (feat.Value > 0)
                     *      topFeat.Add(feat.Key);
                     * }
                     *
                     * NumofFeatures = topFeat.Count; //update number of features to number of features with high information gain
                     * vector = sds.processVector(trainMail_Class, topFeat.ToArray(), NumofFeatures); //extracting the vector values of all the features
                     *
                     * vectorList.Add(vector); //save vectors for each fold
                     * trainMailList.Add(trainMail_Class); //save training mails for each fold
                     * testMailList.Add(testMail_Class); //save test mails for each fold
                     * featList.Add(topFeat.ToArray());
                     * feat_infoGainList.Add(feat_infoGain);
                     * }
                     *
                     *
                     * //get the feature with the lowest count
                     * NumofFeatures = vectorList[0].GetLength(1); //get the number of features for each fold
                     * foreach (int[,] vec in vectorList)
                     * {
                     * int NumofFeat = vec.GetLength(1);
                     * if (NumofFeat < NumofFeatures)
                     *  NumofFeatures = NumofFeat; //get the feature with the lowest count
                     * }
                     *
                     * if (NumofFeatures >= 40) //retrict number of features to 100, if number of selected features is greater than 100
                     * NumofFeatures = 40;
                     *
                     * //For each fold, process the extracted vectors
                     * for (int a = 0; a < N_Fold; a++)
                     * {
                     * //extract vectors of the training data
                     * sds.extractVectors(vectorList[a], trainMailList[a], NumofFeatures, "trainingDS", a);
                     *
                     * //process vector for testing Dataset
                     * TotalMails = testMailList[a].Count;
                     * vector = new int[TotalMails, NumofFeatures];
                     * vector = sds.processVector(testMailList[a], featList[a], NumofFeatures); //extracting the vector values of all the features
                     * sds.extractVectors(vector, testMailList[a], NumofFeatures, "testDS", a); //extract vectors of the test data
                     *
                     * if (a.Equals(9))
                     * {
                     *  Console.Write("Extraction Completed....");
                     *  Console.ReadKey();
                     * }
                     ***/
                }
            }
            else //perform email classification - don't extract!
            {
                n_Runs = 1;
                for (int r = 0; r < n_Runs; r++)
                {
                    classficatnAccuracySum = 0.0; sumFP = 0.0; sumTP = 0.0; sumFN = 0.0; sumF_M = 0.0; sumP = 0.0; sumTime = 0.0; sumStoragePercentage = 0.0;
                    for (int a = 0; a < N_Fold; a++)
                    {
                        if (Emailclassification == true) //if the value of EmailClassification is true, perform email classification and dont extract emails
                        {
                            //SVM Classfication begins here

                            DateTime start1 = DateTime.Now;

                            //First, read in the training and test data.
                            Problem train = Problem.Read(string.Format("ExtractedFeaturesTrain{0}.{1}", (a + 1).ToString(), "txt"));
                            Problem test  = Problem.Read(string.Format("ExtractedFeaturesTest{0}.{1}", (a + 1).ToString(), "txt"));

                            //scalling the data
                            GaussianTransform gt = GaussianTransform.Compute(train);
                            Problem           trainScaledProblem = gt.Scale(train);
                            Problem           testScaledProblem  = gt.Scale(test);

                            /**
                             * //count total number of positives and negative instances
                             * int trp = trainScaledProblem.Y.Count(s => s == 1);
                             * int trN = trainScaledProblem.Y.Count(s => s == -1);
                             * int tep = testScaledProblem.Y.Count(s => s == 1);
                             * int teN = testScaledProblem.Y.Count(s => s == -1);
                             * int totp = trp + tep;
                             * int totN = trN + teN;
                             **/
                            //For this example (and indeed, many scenarios), the default parameters will suffice.
                            Parameter parameters = new Parameter();
                            //double C = new double();
                            //double Gamma = new double();

                            Console.WriteLine("\nClassification Number {0} Step: {1}...............................\n", a + 1, r + 1);

                            //This will do a grid optimization to find the best parameters and store them in C and Gamma, outputting the entire
                            //search to params.txt.

                            /*
                             * if (a == 0)
                             * {
                             *   ParameterSelection.Grid(trainScaledProblem, parameters, "params.txt", out C, out Gamma);
                             *   CV1.Add(ParameterSelection.CVAccuracy);
                             *   CV1.Add(C);
                             *   CV1.Add(Gamma);
                             * }
                             * else if (a == 1)
                             * {
                             *   ParameterSelection.Grid(trainScaledProblem, parameters, "params.txt", out C, out Gamma);
                             *   CV2.Add(ParameterSelection.CVAccuracy);
                             *   CV2.Add(C);
                             *   CV2.Add(Gamma);
                             *
                             *   if (CV1[0] > CV2[0]) //if the previous CV rate is greater than the present, then, discard the present and use the C and Gamma of the previous.
                             *   {
                             *       C = CV1[1];
                             *       Gamma = CV1[2];
                             *   }
                             *
                             * }*/

                            /**
                             * //Standard SVM (i.e. SVM without instance selection)
                             * ParameterSelection.Grid(trainScaledProblem, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(trainScaledProblem, parameters);
                             **/

                            //Bootstrap Sampling Method
                            //Training.samplingGellingPoint(trainScaledProblem, testScaledProblem);
                            //int subsetNumber = 5;
                            //int samplesPerSubset = 30;
                            //Problem subsets = new Problem();
                            //Parameter bestPara = new Parameter();
                            //subsets = Training.BootstrapSampling(trainScaledProblem, parameters, subsetNumber, samplesPerSubset, testScaledProblem, out bestPara); //select subsets using boothtrap sampling method

                            //parameters.C = C;
                            //parameters.Gamma = Gamma;

                            /**
                             * //KNN-Based boundary instance Selection
                             * KNNInstanceSelectionAlgorithm knn = new KNNInstanceSelectionAlgorithm();
                             * int k = 50;
                             * int numberOfSubset = 300; //subset to select for training
                             * Problem dataSubset = knn.computeNearestNeighbour(k, trainScaledProblem, numberOfSubset);
                             * //Problem dataSubset = knn.computeNearestNeighbour(k, trainScaledProblem, numberOfSubset);
                             * ParameterSelection.Grid(dataSubset, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(dataSubset, parameters);
                             **/

                            /**
                             * AntColonyOptimizationBoundarySelection aco = new AntColonyOptimizationBoundarySelection();
                             * int numberOfSubset = 500; //subset to select for training
                             * int kNum = 300;
                             * //Problem dataSubset = aco.ACOBoundarySelection(train, numberOfSubset, kNum);
                             * Problem dataSubset = aco.ACOBoundarySelection(trainScaledProblem, numberOfSubset, kNum);
                             * ParameterSelection.Grid(dataSubset, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(dataSubset, parameters);
                             **/

                            /**
                             * //FFA_Based Instance Selection
                             * FireflyInstanceSelection fi = new FireflyInstanceSelection();
                             * Problem subP = fi.firefly_simple(trainScaledProblem, out storagePercentage);
                             * ParameterSelection.Grid(subP, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(subP, parameters);
                             */

                            /**
                             * //Flower Pollination Algorithm (FPA) Based Instance Selection
                             * FlowerPollinationAlgorithm fpa = new FlowerPollinationAlgorithm();
                             * //Problem subP = fpa.FlowerPollination(trainScaledProblem);
                             * Problem subP = fpa.BinaryFlowerPollination(trainScaledProblem, out storagePercentage);
                             * ParameterSelection.Grid(subP, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(subP, parameters);
                             **/

                            /**
                             * //Cuckoo Search Algorithm
                             * CuckooSearchAlgorithm csa = new CuckooSearchAlgorithm();
                             * Problem subP = csa.CuckooSearch(trainScaledProblem, out storagePercentage);
                             * ParameterSelection.Grid(subP, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(subP, parameters);
                             **/


                            //Social Spider Algorithms
                            SocialSpiderAlgorithm ss = new SocialSpiderAlgorithm();
                            Problem subP             = ss.SocialSpider(trainScaledProblem, out storagePercentage);
                            ParameterSelection.Grid(subP, parameters, "params.txt", out C, out Gamma);
                            parameters.C     = C;
                            parameters.Gamma = Gamma;
                            Model model = Training.Train(subP, parameters);

                            /**
                             * //Bat Algorithm (BA) Based Instance Selection
                             * BatAlgorithm bat = new BatAlgorithm();
                             * Problem subP = bat.BinaryBat(trainScaledProblem, out storagePercentage);
                             * ParameterSelection.Grid(subP, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(subP, parameters);
                             */

                            /**
                             * //Clustering-Based Instance Selection Algorithm
                             * Problem boundaryInstance = Training.ClusteringBoundaryInstance(trainScaledProblem);
                             * ParameterSelection.Grid(boundaryInstance, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(boundaryInstance, parameters);
                             **/

                            /**
                             * //Edge Instance Selection
                             * Problem edgeNN = Training.EdgeInstanceSelection(trainScaledProblem);
                             * ParameterSelection.Grid(edgeNN, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(edgeNN, parameters);
                             **/

                            /**
                             * //Hybrid: Clustering + FFA + EISA
                             * Problem boundaryInstance = Training.ClusteringBoundaryInstance(trainScaledProblem);
                             * FireflyInstanceSelection fi = new FireflyInstanceSelection();
                             * Problem subP = fi.firefly_simple(boundaryInstance);
                             * Problem edgeNN = Training.EdgeInstanceSelection(subP);
                             * ParameterSelection.Grid(edgeNN, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(edgeNN, parameters);
                             */

                            /**
                             * //Hybrid: FFA + EISA
                             * FireflyInstanceSelection fi = new FireflyInstanceSelection();
                             * Problem subP = fi.firefly_simple(trainScaledProblem);
                             * Problem edgeNN = Training.EdgeInstanceSelection(subP);
                             * ParameterSelection.Grid(edgeNN, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(edgeNN, parameters);
                             **/

                            /**
                             * //Hybrid: KNN-based based + FFA-Based
                             * //Problem boundaryInstance = Training.ClusteringBoundaryInstance(trainScaledProblem);
                             * FireflyInstanceSelection fi = new FireflyInstanceSelection();
                             * Problem subP = fi.firefly_simple(trainScaledProblem);
                             * int k = 50;
                             * int numberOfSubset = 100; //subset to select for training
                             * Problem dataSubset = Training.computeNearestNeighbour(k, subP, numberOfSubset);
                             * ParameterSelection.Grid(dataSubset, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(subP, parameters);
                             */

                            /**
                             * //Hybrid: Clustering-Based + FFA-Based
                             * Problem boundaryInstance = Training.ClusteringBoundaryInstance(trainScaledProblem);
                             * FireflyInstanceSelection fi = new FireflyInstanceSelection();
                             * Problem subP = fi.firefly_simple(boundaryInstance);
                             * ParameterSelection.Grid(subP, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(boundaryInstance, parameters);
                             **/

                            /**
                             * //Hybrid: Clustering-Based + Flower Pollination Algorithm
                             * FlowerPollinationAlgorithm fpa = new FlowerPollinationAlgorithm();
                             * Problem boundaryInstance = Training.ClusteringBoundaryInstance(trainScaledProblem);
                             * Problem subP = fpa.FlowerPollination(trainScaledProblem);
                             * ParameterSelection.Grid(subP, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(boundaryInstance, parameters);
                             **/

                            /* //Hybrid: Clustering based + FFA-Based + KNN-Based
                             * Problem boundaryInstance = Training.ClusteringBoundaryInstance(trainScaledProblem);
                             * FireflyInstanceSelection fi = new FireflyInstanceSelection();
                             * Problem subP = fi.firefly_simple(boundaryInstance);
                             * int k = 50;
                             * int numberOfSubset = 100; //subset to select for training
                             * Problem dataSubset = Training.computeNearestNeighbour(k, boundaryInstance, numberOfSubset);
                             * ParameterSelection.Grid(dataSubset, parameters, "params.txt", out C, out Gamma);
                             * parameters.C = C;
                             * parameters.Gamma = Gamma;
                             * Model model = Training.Train(dataSubset, parameters);
                             */
                            //Train the model using the optimal parameters.
                            //Model model = Training.Train(trainScaledProblem, parameters);
                            //removing support vectors that contributes less to the decision surface
                            //Model submod = Training.performSupportVectorReduction(model, trainScaledProblem);

                            //Perform classification on the test data, putting the results in results.txt.
                            //classficatnAccuracySum += Prediction.Predict(testScaledProblem, "ClassificationResults.txt", model, false);
                            //classficatnAccuracy = Prediction.Predict(test, "ClassificationResults.txt", model, false); //classfication accuracy for each iteration ->for the purpose of outputting to the text file
                            classficatnAccuracy     = Prediction.Predict(testScaledProblem, "ClassificationResults.txt", model, false); //classfication accuracy for each iteration ->for the purpose of outputting to the text file
                            classficatnAccuracySum += classficatnAccuracy;
                            Console.WriteLine("\nClassification Accuracy: {0}%", 100 * classficatnAccuracy);

                            PerformanceEvaluator pp = new PerformanceEvaluator("ClassificationResults.txt", test, out TP, out TN, out FP, out FN, out P, out F_M);

                            DateTime end1      = DateTime.Now;
                            TimeSpan duration1 = end1 - start1;
                            double   time1     = duration1.Minutes * 60.0 + duration1.Seconds + duration1.Milliseconds / 1000.0;

                            sumTP += TP; sumTN += TN; sumFP += FP; sumFN += FN; sumP += P; sumF_M += F_M; sumTime += time1; sumStoragePercentage += storagePercentage;

                            //saving all the output to a file
                            string outpt = string.Format("Cross Validation: {0}, Run number {1}, CAccuracy: {2:0.0000} FP: {3:0.0000}, FN: {4:0.0000}, Recall: {5:0.0000}, Precision: {6:0.0000}, FMeasure: {7:0.0000}, Time: {8} Seconds, Storage Percentage: {9}", a + 1, r + 1, (classficatnAccuracy * 100), (FP * 100), (FN * 100), (TP * 100), (P * 100), (F_M * 100), time1, storagePercentage);
                            File.AppendAllText(outputEvaluationFilePath, outpt);
                            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                        }
                        if (classficatnAccuracy * 100 > globalBest)
                        {
                            globalBest = classficatnAccuracy * 100;
                        }
                    }

                    classficatnAccuracySum = (classficatnAccuracySum * 100) / N_Fold; //converting to percentage and dividing by the number of folds
                    sumFP  = (sumFP * 100) / N_Fold;                                  //calculating the average cross validation for False Positive over 10 folds
                    sumTP  = (sumTP * 100) / N_Fold;                                  //calculating the average cross validation for Recall over 10 folds
                    sumFN  = (sumFN * 100) / N_Fold;                                  //calculating the average cross validation for False Negative over 10 folds
                    sumF_M = (sumF_M * 100) / N_Fold;                                 //calculating the average cross validation for F Measure over 10 folds
                    sumP   = (sumP * 100) / N_Fold;                                   //calculating the average cross validation for Precision over 10 folds
                    sumStoragePercentage = sumStoragePercentage / N_Fold;

                    avgRuns += classficatnAccuracySum;
                    avgFP   += sumFP;
                    avgFN   += sumFN;
                    avgR    += sumTP;
                    avgPr   += sumP;
                    avgFM   += sumF_M;
                    avgTime += sumTime;
                    avgStoragePercentage += sumStoragePercentage;

                    //saving all the outputs to a file
                    File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                    File.AppendAllText(outputEvaluationFilePath, string.Format("Average Calculations....Run Number: {0}", r + 1));
                    File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                    string outpt2 = string.Format("Run number {0}, Average CAccuracy: {1:0.0000} FP: {2:0.0000}, FN: {3:0.0000}, Recall: {4:0.0000}, Precision: {5:0.0000}, FMeasure: {6:0.0000}, Time: {7}, Storage Percentage: {8}", r + 1, classficatnAccuracySum, sumFP, sumFN, sumTP, sumP, sumF_M, sumTime, sumStoragePercentage);
                    File.AppendAllText(outputEvaluationFilePath, outpt2);
                    File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                    File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);

                    Console.WriteLine("\nStep {0}...............................\n", r + 1);
                }
            }
            //}

            DateTime end      = DateTime.Now;
            TimeSpan duration = end - start;
            double   time     = duration.Minutes * 60.0 + duration.Seconds + duration.Milliseconds / 1000.0;

            Console.WriteLine("\nAverage processing time {0:########.00} seconds\n", avgTime / n_Runs);
            //Console.WriteLine("\nTotal processing time {0:########.00} seconds\n", time);

            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            File.AppendAllText(outputEvaluationFilePath, "Average processing time:\n" + avgTime / n_Runs + " Seconds");
            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);

            //sending all the outputs to the screen
            Console.WriteLine("\nOverall Average Accuracy: {0:0.00}% \nGlobal Best: {1:0.00}%", avgRuns / n_Runs, globalBest);
            Console.WriteLine("\n\nTotal False Positive: {0:0.00}%\nTotal False Negative: {1:0.00}%\nRecall: {2:0.00}%\nPrecision: {3:0.00}%\nF_Measure: {4:0.00}% \nStorage Percentage: {5:0.00}%", (avgFP / n_Runs), (avgFN / n_Runs), (avgR / n_Runs), (avgPr / n_Runs), (avgFM / n_Runs), (avgStoragePercentage / n_Runs));

            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            File.AppendAllText(outputEvaluationFilePath, "Overall Average Calculations.......");
            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            string outpt3 = string.Format("Overall Average CAccuracy: {0:0.0000} FP: {1:0.0000}, FN: {2:0.0000}, Recall: {3:0.0000}, Precision: {4:0.0000}, FMeasure: {5:0.0000}, Storage Percentage: {6:0.0000}", avgRuns / n_Runs, avgFP / n_Runs, avgFN / n_Runs, avgR / n_Runs, avgPr / n_Runs, avgFM / n_Runs, avgStoragePercentage / n_Runs);

            File.AppendAllText(outputEvaluationFilePath, outpt3);

            Console.ReadKey();
        }
Code example #20
0
        /// <summary>
        /// Entry point: runs <c>n_Runs</c> repetitions of <c>N_Fold</c> cross-validation for
        /// phishing-email classification. Depending on the <c>extractVectorValues</c> /
        /// <c>Emailclassification</c> flags it either (a) extracts per-fold feature-vector files
        /// from the raw train/test email folders, or (b) trains an SVM (KNN-based instance
        /// selection + grid parameter search) on the previously extracted vectors and appends
        /// accuracy / FP / FN / recall / precision / F-measure results to OutputEvaluations.txt.
        /// </summary>
        static void Main(string[] args)
        {
            PDSClass pds = new PDSClass();
            FireFlyAlgorithm ff = new FireFlyAlgorithm();
            ParameterSelection ps = new ParameterSelection();

            //reading the phishing emails and the test Datas
            string testFileFolderName = "TrainTestData";
            //filepath for the training and test dataset
            string trainTestDataPath = Path.Combine(Environment.CurrentDirectory, testFileFolderName);

            string[] trainTestFileURL = System.IO.Directory.GetFileSystemEntries(trainTestDataPath);

            string extractedVectorsFolderName = "ExtractedFeatures";
            string extractedVectorsFilePath = Path.Combine(Environment.CurrentDirectory, extractedVectorsFolderName);

            string[] evFileURL = System.IO.Directory.GetFileSystemEntries(extractedVectorsFilePath);

            string outputEvaluationFilePath = Path.Combine(Environment.CurrentDirectory, "OutputEvaluations.txt");

            // Clear every result/intermediate file so each run starts from a clean slate.
            File.WriteAllText(ps.SVAccuracyFilePath, string.Empty);
            File.WriteAllText(outputEvaluationFilePath, string.Empty); //deleting the contents of the file holding the results for new results
            File.WriteAllText(ps.filePath, string.Empty);              //deleting the contents of the file holding the extracted C,Gamma and CV values
            File.WriteAllText(pds.extractedFeaturesFilePathTrainDS, string.Empty);
            File.WriteAllText(ps.filePath2, string.Empty);
            File.WriteAllText(ps.filePath3, string.Empty);

            int NumofFeatures = 17;
            double DatasetEntropy = 0.0;
            int TotalMails;

            string[] features = pds.Features(); //saving the features in the string array

            int[,] HamPhishCount = new int[,] { }; //count for the total number of ham and phish emails

            string[] testTrainFolderFiles = new string[] { };
            string folderFiles;

            double[] informationGain = new double[NumofFeatures];
            Dictionary<string, double> feat_infoGain = pds.Feature_InfoGain(features, informationGain); //saving the information gain for all the individual features

            double classficatnAccuracySum = 0.0; //cummulative classification accuracy
            double classficatnAccuracy = 0;      //classification accuracy for each iteration
            DateTime start = DateTime.Now;
            double totalPhishing = 0, totalHam = 0; int mailGrandTotal = 0;

            //initializing all the variables that will used for evaluating the performance of the classifier
            double FP = 0.0, FN = 0.0, TP = 0.0, TN = 0.0, P = 0.0, F_M = 0.0, sumFP = 0.0, sumF_M = 0.0, sumFN = 0.0, sumTP = 0.0, sumTN = 0.0, sumP = 0.0;

            int N_Fold = 10; //number of folds
            int n_Runs = 1;  //number of runs

            double[,] NormalizedVector = new double[,] { }; //normalized vector values for each features

            // NOTE(review): 'p' is never used below; kept only in case Program's constructor
            // has side effects — TODO confirm and remove.
            Program p = new Program();
            double avgRuns = 0.0, avgFP = 0.0, avgFN = 0.0, avgR = 0.0, avgPr = 0.0, avgFM = 0.0; //avg=> average
            double globalBest = double.MinValue;

            //change the boolean value appropriately to choose the task you want to perform (either vector value extraction or email classification)
            //both values should not be true. This is to reduce processing time
            bool extractVectorValues = true;
            bool Emailclassification = false;

            double C = new double();
            double Gamma = new double();

            List<double> CV1 = new List<double>(); //Save the CV accuracy, C and Gamma for comparison
            List<double> CV2 = new List<double>(); //Save the CV accuracy, C and Gamma for comparison

            for (int aa = 0; aa < n_Runs; aa++)
            {
                // Reset the per-run accumulators before each set of folds.
                classficatnAccuracySum = 0.0;
                sumFP = 0.0;
                sumTP = 0.0; //Recall
                sumFN = 0.0;
                sumF_M = 0.0;
                sumP = 0.0;
                for (int a = 0; a < N_Fold; a++)
                {
                    if (extractVectorValues == true) //extract email vector values only; no classification
                    {
                        n_Runs = 1; // extraction needs to happen once, not n_Runs times
                        string[] trainFileURLs = new string[] { };                                //urls for the train emails
                        string[] testFileURLs = new string[] { };                                 //urls for the test emails
                        Dictionary<string, int> trainMail_Class = new Dictionary<string, int>(); //emails and classes of all the training emails
                        Dictionary<string, int> testMail_Class = new Dictionary<string, int>();  //emails and classes of all the test emails
                        string[] trainMailFileNames = new string[trainMail_Class.Count];          //file names for all the emails in the training dataset
                        string[] testMailFileNames = new string[] { };                            //file names for all the emails in the test dataset
                        string trainMailLabel; int phishCount = 0, hamCount = 0; double phishPercentage, hamPercentage;

                        //processing the training dataset for the each fold: every folder except fold 'a'
                        for (int i = 0; i < trainTestFileURL.Length; i++)
                        {
                            if (i.Equals(a))
                            {
                                continue; //skip the folder held out as this fold's test dataset
                            }
                            testTrainFolderFiles = System.IO.Directory.GetFiles(trainTestFileURL[i]);  //filenames of all the emails in the training dataset
                            trainFileURLs = trainFileURLs.Concat(testTrainFolderFiles).ToArray();      //accumulate urls across folders
                            trainMailFileNames = pds.getFileNames(trainFileURLs);
                            for (int j = 0; j < testTrainFolderFiles.Length; j++)
                            {
                                //label prefix: HM (Ham Mails) / PM (Phish Mails)
                                trainMailLabel = trainMailFileNames[j].Substring(0, 2);
                                folderFiles = File.ReadAllText(testTrainFolderFiles[j]);
                                trainMail_Class[pds.ProcessMails(folderFiles)] = (trainMailLabel.Equals("PM")) ? 1 : 0;
                                // NOTE(review): trainMail_Class accumulates across folders and
                                // duplicate processed-mail keys overwrite earlier entries, so
                                // ElementAt(j) (j indexes the *current* folder) may not be the
                                // entry just added; the ham/phish tallies can be skewed —
                                // TODO confirm intended behavior before changing.
                                if (trainMail_Class.ElementAt(j).Value == 1)
                                {
                                    phishCount++; //counting the total number of ham and phishing to get their percentage
                                }
                                else
                                {
                                    hamCount++;
                                }
                            }
                        }

                        //processing the test dataset for each fold (folder 'a' only)
                        for (int i = a; i < a + 1; i++)
                        {
                            testTrainFolderFiles = System.IO.Directory.GetFiles(trainTestFileURL[i]);
                            testFileURLs = testFileURLs.Concat(testTrainFolderFiles).ToArray();
                            testMailFileNames = pds.getFileNames(testFileURLs);
                            for (int j = 0; j < testTrainFolderFiles.Length; j++)
                            {
                                trainMailLabel = testMailFileNames[j].Substring(0, 2);
                                folderFiles = File.ReadAllText(testTrainFolderFiles[j]);
                                testMail_Class[pds.ProcessMails(folderFiles)] = (trainMailLabel.Equals("PM")) ? 1 : 0;
                                if (testMail_Class.ElementAt(j).Value == 1)
                                {
                                    phishCount++;
                                }
                                else
                                {
                                    hamCount++;
                                }
                            }
                        }

                        //calculating the percentage of phishing and ham email in the dataset
                        phishPercentage = (double)phishCount / (double)(trainMail_Class.Count + testMail_Class.Count);
                        hamPercentage = (double)hamCount / (double)(trainMail_Class.Count + testMail_Class.Count);
                        mailGrandTotal = phishCount + hamCount;
                        totalHam = hamCount; totalPhishing = phishCount;

                        //Information Gain
                        TotalMails = trainMail_Class.Count;
                        int[,] vector = new int[TotalMails, NumofFeatures];
                        pds.processVector(vector, trainMail_Class, features, trainFileURLs, NumofFeatures);                //extracting the vector values of all the features
                        HamPhishCount = new int[NumofFeatures, 4];
                        pds.FeatureVectorSum(NumofFeatures, TotalMails, vector, trainMail_Class, HamPhishCount);           //total number of zeros and ones for both phishing and ham emails
                        DatasetEntropy = pds.Entropy(trainMail_Class);                                                     //entropy for the entire dataset
                        pds.CalInformationGain(NumofFeatures, HamPhishCount, informationGain, TotalMails, DatasetEntropy); //information gain for each feature
                        feat_infoGain = pds.Feature_InfoGain(features, informationGain);                                   //assigning the calculated information gain to each feature

                        //keep only the top-ranked features (feat_infoGain is assumed sorted by
                        //information gain — TODO confirm ordering contract of Feature_InfoGain)
                        int NumofFeatures2 = NumofFeatures - 8;
                        string[] newFeatures = new string[NumofFeatures2];
                        for (int i = 0; i < NumofFeatures2; i++)
                        {
                            newFeatures[i] = feat_infoGain.ElementAt(i).Key;
                        }

                        //process vector for training Dataset
                        TotalMails = trainMail_Class.Count;
                        vector = new int[TotalMails, NumofFeatures2];
                        pds.processVector(vector, trainMail_Class, newFeatures, trainFileURLs, NumofFeatures2);
                        NormalizedVector = ff.Normalize(vector); //normalize the all vector values for train data

                        //extract vectors of the training data
                        pds.extractVectors(vector, trainMail_Class, NumofFeatures2, "trainingDS", a);

                        //process vector for testing Dataset
                        TotalMails = testMail_Class.Count;
                        vector = new int[TotalMails, NumofFeatures2];
                        pds.processVector(vector, testMail_Class, newFeatures, testFileURLs, NumofFeatures2);
                        NormalizedVector = ff.Normalize(vector); //normalize the all vector values for test data

                        //extract vectors of the test data
                        pds.extractVectors(vector, testMail_Class, NumofFeatures2, "testDS", a);
                    }

                    if (Emailclassification == true) //perform email classification; no extraction
                    {
                        //SVM Classfication begins here
                        //First, read in the training and test data (written by a previous extraction run).
                        Problem train = Problem.Read(string.Format("ExtractedFeaturesTrain{0}.{1}", (a + 1).ToString(), "txt"));
                        Problem test = Problem.Read(string.Format("ExtractedFeaturesTest{0}.{1}", (a + 1).ToString(), "txt"));

                        //scaling the data; the same transform is applied to train and test
                        GaussianTransform gt = GaussianTransform.Compute(train);
                        Problem trainScaledProblem = gt.Scale(train);
                        Problem testScaledProblem = gt.Scale(test);

                        //For this example (and indeed, many scenarios), the default parameters will suffice.
                        Parameter parameters = new Parameter();

                        Console.WriteLine("\nClassification Number {0} Step: {1}...............................\n", a + 1, aa + 1);

                        //KNN-Based boundary instance selection: train on a boundary subset rather
                        //than the full fold. (Alternative strategies — bootstrap sampling, FFA,
                        //clustering-based, edge-based and hybrids — were removed as dead code;
                        //see version control history.)
                        int k = 50;
                        int numberOfSubset = 300; //subset to select for training
                        Problem dataSubset = Training.computeNearestNeighbour(k, trainScaledProblem, numberOfSubset);
                        //Grid optimization finds the best C/Gamma, logging the search to params.txt.
                        ParameterSelection.Grid(dataSubset, parameters, "params.txt", out C, out Gamma);
                        parameters.C = C;
                        parameters.Gamma = Gamma;
                        Model model = Training.Train(dataSubset, parameters);

                        //Perform classification on the test data, putting the results in ClassificationResults.txt.
                        classficatnAccuracy = Prediction.Predict(testScaledProblem, "ClassificationResults.txt", model, false); //accuracy for this fold
                        classficatnAccuracySum += classficatnAccuracy;
                        Console.WriteLine("\nClassification Accuracy: {0}%", 100 * classficatnAccuracy);

                        PerformanceEvaluator pp = new PerformanceEvaluator("ClassificationResults.txt", test, out TP, out TN, out FP, out FN, out P, out F_M);

                        sumTP += TP; sumTN += TN; sumFP += FP; sumFN += FN; sumP += P; sumF_M += F_M;

                        //saving all the output to a file
                        string outpt = string.Format("Cross Validation: {0}, Run number {1}, CAccuracy: {2:0.0000} FP: {3:0.0000}, FN: {4:0.0000}, Recall: {5:0.0000}, Precision: {6:0.0000}, FMeasure: {7:0.0000}", a + 1, aa + 1, (classficatnAccuracy * 100), (FP * 100), (FN * 100), (TP * 100), (P * 100), (F_M * 100));
                        File.AppendAllText(outputEvaluationFilePath, outpt);
                        File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                    }

                    //track the single best fold accuracy seen across all runs
                    if (classficatnAccuracy * 100 > globalBest)
                    {
                        globalBest = classficatnAccuracy * 100;
                    }
                }

                classficatnAccuracySum = (classficatnAccuracySum * 100) / N_Fold; //converting to percentage and dividing by the number of folds
                sumFP = (sumFP * 100) / N_Fold;   //average cross validation False Positive over the folds
                sumTP = (sumTP * 100) / N_Fold;   //average cross validation Recall over the folds
                sumFN = (sumFN * 100) / N_Fold;   //average cross validation False Negative over the folds
                sumF_M = (sumF_M * 100) / N_Fold; //average cross validation F Measure over the folds
                sumP = (sumP * 100) / N_Fold;     //average cross validation Precision over the folds

                avgRuns += classficatnAccuracySum;
                avgFP += sumFP;
                avgFN += sumFN;
                avgR += sumTP;
                avgPr += sumP;
                avgFM += sumF_M;

                //saving all the outputs to a file
                File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                File.AppendAllText(outputEvaluationFilePath, string.Format("Average Calculations....Run Number: {0}", aa + 1));
                File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                string outpt2 = string.Format("Run number {0}, Average CAccuracy: {1:0.0000} FP: {2:0.0000}, FN: {3:0.0000}, Recall: {4:0.0000}, Precision: {5:0.0000}, FMeasure: {6:0.0000}", aa + 1, classficatnAccuracySum, sumFP, sumFN, sumTP, sumP, sumF_M);
                File.AppendAllText(outputEvaluationFilePath, outpt2);
                File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
                File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);

                Console.WriteLine("\nStep {0}...............................\n", aa + 1);
            }

            DateTime end = DateTime.Now;
            TimeSpan duration = end - start;
            //BUGFIX: previously computed as Minutes*60 + Seconds + Milliseconds/1000, which
            //dropped whole hours and misused component properties; TotalSeconds is the
            //correct elapsed time in seconds.
            double time = duration.TotalSeconds;

            Console.WriteLine("\nTotal processing time {0:########.00} seconds\n", time);

            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            File.AppendAllText(outputEvaluationFilePath, "Total processing time:\n" + time + " Seconds");
            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);

            //sending all the outputs to the screen
            Console.WriteLine("\nOverall Average Accuracy: {0:0.00}% \nGlobal Best: {1:0.00}%", avgRuns / n_Runs, globalBest);
            Console.WriteLine("\n\nTotal False Positive: {0:0.00}%\nTotal False Negative: {1:0.00}%\nRecall: {2:0.00}%\nPrecision: {3:0.00}%\nF_Measure: {4:0.00}%", (avgFP / n_Runs), (avgFN / n_Runs), (avgR / n_Runs), (avgPr / n_Runs), (avgFM / n_Runs));

            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            File.AppendAllText(outputEvaluationFilePath, "Overall Average Calculations.......");
            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            File.AppendAllText(outputEvaluationFilePath, Environment.NewLine);
            string outpt3 = string.Format("Overall Average CAccuracy: {0:0.0000} FP: {1:0.0000}, FN: {2:0.0000}, Recall: {3:0.0000}, Precision: {4:0.0000}, FMeasure: {5:0.0000}", avgRuns / n_Runs, avgFP / n_Runs, avgFN / n_Runs, avgR / n_Runs, avgPr / n_Runs, avgFM / n_Runs);

            File.AppendAllText(outputEvaluationFilePath, outpt3);

            Console.ReadKey();
        }
コード例 #21
0
ファイル: SVM.cs プロジェクト: viktorbilousov/TextTonality
        /// <summary>
        /// Returns the class label that the given vector is assigned to by the trained model.
        /// </summary>
        /// <param name="testVector">Feature vector to classify.</param>
        /// <returns>The predicted class label.</returns>
        public double Test(Node[] testVector)
        {
            // Single-sample prediction against the stored SVM model.
            return Prediction.Predict(model, testVector);
        }
コード例 #22
0
        /// <summary>
        /// Runs the trained SVM over the samples read from the "test_samples" file
        /// and writes the predicted labels to the "result" file.
        /// </summary>
        private void TestClassifier()
        {
            // Load the evaluation set, then predict without probability estimates.
            var testProblem = Problem.Read("test_samples");
            Prediction.Predict(testProblem, "result", model_svm, false);
        }
コード例 #23
0
        /// <summary>
        /// Predicts the class label for a single feature vector using the trained model.
        /// </summary>
        /// <param name="test">Feature vector to classify.</param>
        /// <returns>The predicted class label.</returns>
        public double predict(Node[] test)
        {
            // Delegate directly to the SVM prediction routine.
            return Prediction.Predict(model, test);
        }
        private void button2_Click(object sender, EventArgs e)
        {
            pictureBox2.Image = (Bitmap)pictureBox1.Image.Clone();
            Bitmap         src        = new Bitmap(pictureBox2.Image);
            Bitmap         res        = new Bitmap(pictureBox2.Image);
            SaveFileDialog saveDialog = new SaveFileDialog();

            src = resize(src, new Size(200, 200));
            res = resize(res, new Size(200, 200));
            pictureBox2.Image = src;
            srcImg            = src;
            pictureBox2.Image = res;
            Bitmap sampleImage = new Bitmap(pictureBox2.Image);
            var    rect        = new Rectangle(0, 0, sampleImage.Width, sampleImage.Height);
            var    data        = sampleImage.LockBits(rect, ImageLockMode.ReadWrite, sampleImage.PixelFormat);
            var    depth       = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8; //bytes per pixel

            var buffer = new byte[data.Width * data.Height * depth];

            //copy pixels to buffer
            Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);

            System.Threading.Tasks.Parallel.Invoke(
                () =>
            {
                //upper-left
                Process(buffer, 0, 0, data.Width / 2, data.Height / 2, data.Width, depth);
            },
                () =>
            {
                //upper-right
                Process(buffer, data.Width / 2, 0, data.Width, data.Height / 2, data.Width, depth);
            },
                () =>
            {
                //lower-left
                Process(buffer, 0, data.Height / 2, data.Width / 2, data.Height, data.Width, depth);
            },
                () =>
            {
                //lower-right
                Process(buffer, data.Width / 2, data.Height / 2, data.Width, data.Height, data.Width, depth);
            }
                );

            //Copy the buffer back to image
            Marshal.Copy(buffer, 0, data.Scan0, buffer.Length);

            sampleImage.UnlockBits(data);
            pictureBox2.Image = sampleImage;
            dstImg            = sampleImage;
            void Process(byte[] buffer1, int x, int y, int endx, int endy, int width, int depth1)
            {
                for (int i = x; i < endx; i++)
                {
                    for (int j = y; j < endy; j++)
                    {
                        var offset = ((j * width) + i) * depth;
                        var B      = buffer[offset + 0];
                        var G      = buffer[offset + 1];
                        var R      = buffer[offset + 2];
                        var a      = Math.Max(R, Math.Max(B, G));
                        var b      = Math.Min(R, Math.Min(B, G));
                        if (!(((R > 95) && (G > 40) && (B > 20) && ((a - b) > 15) && (Math.Abs(R - G) > 15) && (R > G) && (R > B)) || ((R > 220) && (G > 210) && (B > 170) && ((a - b) > 15) && (Math.Abs(R - G) > 15) && (R > G) && (G > B))))
                        {
                            buffer[offset + 0] = buffer[offset + 1] = buffer[offset + 2] = 0;
                        }
                        else
                        {
                            buffer[offset + 0] = buffer[offset + 1] = buffer[offset + 2] = 255;
                        }
                    }
                }
            }

            //Graysacle
            GrayscaleBT709 filter = new GrayscaleBT709();

            pictureBox2.Image = filter.Apply((Bitmap)pictureBox2.Image);
            dstImg            = filter.Apply(dstImg);
            //Dilatation
            try
            {
                Dilatation filter1 = new Dilatation();
                pictureBox2.Image = filter1.Apply((Bitmap)pictureBox2.Image);
                dstImg            = filter1.Apply(dstImg);
            }
            catch (Exception)
            {
                System.Windows.Forms.MessageBox.Show("Apply Grayscale");
            }
            //Biggest Blob Extraction
            ExtractBiggestBlob filter2 = new ExtractBiggestBlob();

            pictureBox2.Image = filter2.Apply((Bitmap)pictureBox2.Image);
            dstImg            = filter2.Apply(dstImg);
            blob = filter2.BlobPosition;
            Bitmap newBmp = new Bitmap(dstImg.Width, dstImg.Height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

            using (Graphics gfx = Graphics.FromImage(newBmp))
            {
                gfx.DrawImage(dstImg, 0, 0);
            }
            //newBmp = dstImg;
            for (int i = 0; i < dstImg.Width; i++)
            {
                for (int j = 0; j < dstImg.Height; j++)
                {
                    System.Drawing.Color srcColor = srcImg.GetPixel(i + blob.X, j + blob.Y);
                    System.Drawing.Color dstColor = dstImg.GetPixel(i, j);
                    if (!(dstColor.R >= 0 && dstColor.R <= 10 && dstColor.G >= 0 && dstColor.G <= 10 && dstColor.B >= 0 && dstColor.B <= 10))
                    {
                        newBmp.SetPixel(i, j, srcColor);
                    }
                }
            }
            dstImg            = newBmp;
            pictureBox2.Image = newBmp;

            List <double> edgeCount  = new List <double>();
            List <double> ratio      = new List <double>();
            int           pixelCount = 0;

            Bitmap         hoefImage  = new Bitmap(pictureBox2.Image);
            GrayscaleBT709 grayFilter = new GrayscaleBT709();

            hoefImage = grayFilter.Apply((Bitmap)pictureBox2.Image);
            CannyEdgeDetector cannyFilter = new CannyEdgeDetector(0, 0, 1.4);

            hoefImage         = cannyFilter.Apply(hoefImage);
            pictureBox2.Image = hoefImage;
            var imgarray = new System.Drawing.Image[36];

            for (int i = 0; i < 6; i++)
            {
                for (int j = 0; j < 6; j++)
                {
                    pixelCount++;
                    var index = i * 6 + j;
                    imgarray[index] = new Bitmap(40, 40);
                    var graphics = Graphics.FromImage(imgarray[index]);
                    graphics.DrawImage(hoefImage, new Rectangle(0, 0, 40, 40), new Rectangle(i * 40, j * 40, 40, 40), GraphicsUnit.Pixel);
                    graphics.Dispose();
                }
            }
            for (int n = 0; n < 36; n++)
            {
                int    counter     = 0;
                Bitmap bufferImage = new Bitmap(imgarray[n]);
                for (int i = 0; i < 40; i++)
                {
                    for (int j = 0; j < 40; j++)
                    {
                        System.Drawing.Color hoefColor = bufferImage.GetPixel(i, j);
                        if (!(hoefColor.R == 0 && hoefColor.G == 0 && hoefColor.B == 0))
                        {
                            counter++;
                        }
                    }
                }
                edgeCount.Add(counter);
            }
            double Total = edgeCount.Sum();

            foreach (double x in edgeCount)
            {
                var a = x / Total;
                ratio.Add(a);
            }

            FileStream   fs = new FileStream(@"E:\test.txt", FileMode.Create, FileAccess.Write);
            StreamWriter sw = new StreamWriter(fs);
            int          no = 0;

            sw.Write((++no) + " ");
            for (int i = 0; i < ratio.Count; ++i)
            {
                sw.Write(i + ":" + ratio[i].ToString() + " ");
            }
            sw.WriteLine();

            sw.Close();
            fs.Close();
            //Support Vector Machine
            Problem train = Problem.Read(@"E:\AI.txt");
            Problem test  = Problem.Read(@"E:\test.txt");

            Parameter parameters = new Parameter();

            double C;
            double Gamma;

            parameters.C = 32; parameters.Gamma = 8;
            Model model = Training.Train(train, parameters);

            Prediction.Predict(test, @"E:\result.txt", model, false);

            FileStream   fs1 = new FileStream(@"E:\result.txt", FileMode.Open, FileAccess.Read);
            StreamReader sw1 = new StreamReader(fs1);
            string       w   = sw1.ReadLine();

            if (w == "1")
            {
                MessageBox.Show("A");
            }
            else if (w == "2")
            {
                MessageBox.Show("B");
            }
            else if (w == "3")
            {
                MessageBox.Show("C");
            }
            else if (w == "4")
            {
                MessageBox.Show("D");
            }
            else if (w == "5")
            {
                MessageBox.Show("E");
            }
            else if (w == "6")
            {
                MessageBox.Show("F");
            }
            else if (w == "7")
            {
                MessageBox.Show("G");
            }
            else if (w == "8")
            {
                MessageBox.Show("H");
            }
            else if (w == "9")
            {
                MessageBox.Show("I");
            }
            else if (w == "10")
            {
                MessageBox.Show("J");
            }
            else if (w == "11")
            {
                MessageBox.Show("K");
            }
            //else { MessageBox.Show("L"); }
        }
コード例 #25
0
        /// <summary>
        /// Builds the SVM training set from gesture images on disk. Each
        /// sub-directory of <c>path</c> is one class label ('no'); every image in it
        /// is pushed through the recognition pipeline (skin segmentation, grayscale,
        /// dilatation, biggest-blob crop, Canny edges, 6x6 HOEF histogram) and the
        /// resulting 36-bin normalized feature vector is appended to D:\AI.txt in
        /// libsvm format. After each image the SVM is retrained from D:\AI.txt and
        /// D:\test.txt is classified into D:\result.txt.
        /// </summary>
        private void resultGestureToolStripMenuItem_Click(object sender, EventArgs e)
        {
            int           dir;
            int           no;
            List <string> filedir = new List <string>(Directory.GetDirectories(path));

            // One directory per class; 'no' is the libsvm class label (capped at 26).
            for (dir = 0, no = 0; (dir < filedir.Count && no <= 26); dir++, no++)
            {
                string[]      filePaths = Directory.GetFiles(filedir[dir].ToString());
                List <Bitmap> y         = new List <Bitmap>();
                foreach (var iI in filePaths)
                {
                    y.Add(new Bitmap(iI));
                }

                foreach (Bitmap img in y)
                {
                    pictureBox1.Image = img;
                    srcImg            = img;
                    dstImg            = img;

                    // ---- Skin segmentation: process the four image quadrants in parallel ----
                    Bitmap skin  = new Bitmap(pictureBox1.Image);
                    var    rect  = new Rectangle(0, 0, skin.Width, skin.Height);
                    var    data  = skin.LockBits(rect, ImageLockMode.ReadWrite, skin.PixelFormat);
                    var    depth = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8; // bytes per pixel

                    var buffer = new byte[data.Width * data.Height * depth];

                    // copy pixels to buffer
                    Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);

                    System.Threading.Tasks.Parallel.Invoke(
                        () => Process(buffer, 0, 0, data.Width / 2, data.Height / 2, data.Width, depth),                     // upper-left
                        () => Process(buffer, data.Width / 2, 0, data.Width, data.Height / 2, data.Width, depth),            // upper-right
                        () => Process(buffer, 0, data.Height / 2, data.Width / 2, data.Height, data.Width, depth),           // lower-left
                        () => Process(buffer, data.Width / 2, data.Height / 2, data.Width, data.Height, data.Width, depth)); // lower-right

                    // Copy the processed buffer back into the bitmap.
                    Marshal.Copy(buffer, 0, data.Scan0, buffer.Length);
                    skin.UnlockBits(data);
                    pictureBox2.Image = skin;

                    // ---- Normalize both views to 200x200 ----
                    Bitmap src = new Bitmap(pictureBox1.Image);
                    Bitmap res = new Bitmap(pictureBox2.Image);
                    src = resize(src, new Size(200, 200));
                    res = resize(res, new Size(200, 200));
                    pictureBox1.Image = src;
                    pictureBox2.Image = res;

                    // ---- Grayscale + dilatation ----
                    GrayscaleBT709 grayoject = new GrayscaleBT709();
                    pictureBox2.Image = grayoject.Apply((Bitmap)pictureBox2.Image);

                    Dilatation filter = new Dilatation();
                    pictureBox2.Image = filter.Apply((Bitmap)pictureBox2.Image);

                    // ---- Biggest-blob extraction ----
                    // BUG FIX: the original code applied the Dilatation filter a second
                    // time here instead of the blob extractor, so filter1.BlobPosition
                    // was read without ExtractBiggestBlob ever having run.
                    ExtractBiggestBlob filter1 = new ExtractBiggestBlob();
                    pictureBox2.Image = filter1.Apply((Bitmap)pictureBox2.Image);
                    blob = filter1.BlobPosition;

                    // ---- Merge: keep source colors where the mask is non-black ----
                    Bitmap src1   = new Bitmap(pictureBox1.Image);
                    Bitmap res1   = new Bitmap(pictureBox2.Image);
                    Bitmap newBmp = new Bitmap(src1.Width, res1.Height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

                    for (int i = 0; i < res1.Width; i++)
                    {
                        for (int j = 0; j < res1.Height; j++)
                        {
                            System.Drawing.Color srcColor = src1.GetPixel(i + blob.X, j + blob.Y);
                            System.Drawing.Color dstColor = res1.GetPixel(i, j);
                            // Near-black mask pixels (R, G and B all <= 10) stay black;
                            // everything else takes the corresponding source pixel.
                            if (!(dstColor.R >= 0 && dstColor.R <= 10 && dstColor.G >= 0 && dstColor.G <= 10 && dstColor.B >= 0 && dstColor.B <= 10))
                            {
                                newBmp.SetPixel(i, j, srcColor);
                            }
                            else
                            {
                                newBmp.SetPixel(i, j, Color.Black);
                            }
                        }
                    }
                    res1 = newBmp;
                    pictureBox2.Image = newBmp;

                    // ---- Grayscale + Canny edges for the feature image ----
                    List <double>  edgeCount = new List <double>();
                    List <double>  ratio     = new List <double>();
                    Bitmap         Destimg   = new Bitmap(pictureBox2.Image);
                    GrayscaleBT709 go        = new GrayscaleBT709();
                    pictureBox2.Image = go.Apply((Bitmap)pictureBox2.Image);
                    Destimg           = go.Apply(Destimg);
                    CannyEdgeDetector filter2 = new CannyEdgeDetector(0, 0, 1.4);
                    pictureBox2.Image = filter2.Apply((Bitmap)pictureBox2.Image);
                    Destimg           = filter2.Apply(Destimg);

                    // ---- HOEF: split the edge image into a 6x6 grid of 40x40 tiles ----
                    var imgarray = new System.Drawing.Image[36];

                    for (int i = 0; i < 6; i++)
                    {
                        for (int j = 0; j < 6; j++)
                        {
                            var index = i * 6 + j;
                            imgarray[index] = new Bitmap(40, 40);
                            using (var graphics = Graphics.FromImage(imgarray[index]))
                            {
                                graphics.DrawImage(Destimg, new Rectangle(0, 0, 40, 40), new Rectangle(i * 40, j * 40, 40, 40), GraphicsUnit.Pixel);
                            }
                        }
                    }

                    // Count non-black (edge) pixels per tile.
                    for (int n = 0; n < 36; n++)
                    {
                        int    counter     = 0;
                        Bitmap bufferImage = new Bitmap(imgarray[n]);
                        for (int i = 0; i < 40; i++)
                        {
                            for (int j = 0; j < 40; j++)
                            {
                                System.Drawing.Color hoefColor = bufferImage.GetPixel(i, j);
                                if (!(hoefColor.R == 0 && hoefColor.G == 0 && hoefColor.B == 0))
                                {
                                    counter++;
                                }
                            }
                        }
                        edgeCount.Add(counter);
                    }

                    // Normalize so the 36 bins sum to 1. FIX: the original cast each
                    // count to float before dividing, needlessly losing precision and
                    // diverging from the sibling pipeline which divides in double.
                    double total = edgeCount.Sum();
                    foreach (double x in edgeCount)
                    {
                        ratio.Add(x / total);
                    }

                    // ---- Append one labelled feature vector in libsvm format ----
                    using (FileStream fs = new FileStream(@"D:\AI.txt", FileMode.Append, FileAccess.Write))
                    using (StreamWriter sw = new StreamWriter(fs))
                    {
                        sw.Write((no) + " ");
                        for (int i = 0; i < ratio.Count; ++i)
                        {
                            sw.Write(i + ":" + ratio[i].ToString() + " ");
                        }
                        sw.WriteLine();
                    }

                    // ---- Retrain the SVM on everything written so far and classify ----
                    Problem train = Problem.Read(@"D:\AI.txt");
                    Problem test  = Problem.Read(@"D:\test.txt");

                    Parameter parameters = new Parameter();
                    parameters.C = 32; parameters.Gamma = 8;
                    Model model = Training.Train(train, parameters);
                    Prediction.Predict(test, @"D:\result.txt", model, false);
                }
            }
        }
コード例 #26
0
        /// <summary>
        /// Runs one full recognition pass on the current video frame: skin
        /// segmentation, morphological cleanup, biggest-blob crop, Sobel edge
        /// detection and a 6x6 HOEF histogram. In training mode (ModeList index 0)
        /// the labelled feature vector is appended to D:\train.txt; otherwise it is
        /// written to D:\test.txt, classified with the SVM, and the predicted
        /// letter/phrase is displayed and spoken aloud. Also reports elapsed time.
        /// </summary>
        private void Detection()
        {
            var watch = System.Diagnostics.Stopwatch.StartNew();

            if (Video.Image != null)
            {
                if (ModeList.selectedIndex == 0)
                {
                    // Training mode: class label comes from the alphabet list box.
                    training = 1;
                    int prev = AlphabetList.selectedIndex;
                    // NOTE(review): 'prev' equals selectedIndex at this point, so the
                    // '|| prev == 26' clause is redundant — confirm intent.
                    if (AlphabetList.selectedIndex == 26 || prev == 26)
                    {
                        // NOTE(review): 67 appears to be a sentinel label for the last
                        // list entry — confirm against the training-file consumers.
                        label = 67;
                    }
                    else if (AlphabetList.selectedIndex == -1)
                    {
                        // Nothing selected: keep the previously captured index.
                        label = prev;
                    }
                    else
                    {
                        label = AlphabetList.selectedIndex;
                    }
                }
                else
                {
                    training = 0;
                }

                // Progress bar tracks the 9 pipeline stages below.
                ProgressBar.Visible = true;

                ProgressBar.Value         = 0;
                ProgressBar.Maximum_Value = 9;
                ProgressBar.Value        += 1;

                // Snapshot the live video frame.
                CapturedBox.Image = (Bitmap)Video.Image.Clone();
                Bitmap src = new Bitmap(CapturedBox.Image);

                //skin detection
                // Lock the frame's pixels and run Skin_process over the four
                // quadrants in parallel, mutating the shared byte buffer in place.
                var image  = new Rectangle(0, 0, src.Width, src.Height);
                var value  = src.LockBits(image, ImageLockMode.ReadWrite, src.PixelFormat);
                var size   = Bitmap.GetPixelFormatSize(value.PixelFormat) / 8;
                var buffer = new byte[value.Width * value.Height * size];
                Marshal.Copy(value.Scan0, buffer, 0, buffer.Length);

                System.Threading.Tasks.Parallel.Invoke(
                    () =>
                {
                    Skin_process(buffer, 0, 0, value.Width / 2, value.Height / 2, value.Width, size);
                },
                    () =>
                {
                    Skin_process(buffer, 0, value.Height / 2, value.Width / 2, value.Height, value.Width, size);
                },
                    () =>
                {
                    Skin_process(buffer, value.Width / 2, 0, value.Width, value.Height / 2, value.Width, size);
                },
                    () =>
                {
                    Skin_process(buffer, value.Width / 2, value.Height / 2, value.Width, value.Height, value.Width, size);
                }
                    );
                Marshal.Copy(buffer, 0, value.Scan0, buffer.Length);
                src.UnlockBits(value);
                SkinBox.Image = src;

                // 'Skin' is presumably a flag set by Skin_process when skin pixels
                // were found — TODO confirm; the whole pipeline is gated on it.
                if (Skin == 1)
                {
                    ProgressBar.Value += 1;

                    //Dilation & Erosion
                    // 10 dilations followed by 10 erosions (morphological closing)
                    // to remove holes/noise in the binary skin mask.
                    src = Grayscale.CommonAlgorithms.BT709.Apply(src);
                    BinaryDilation3x3 dilatation = new BinaryDilation3x3();
                    BinaryErosion3x3  erosion    = new BinaryErosion3x3();
                    for (int a = 1; a <= 10; a++)
                    {
                        src = dilatation.Apply(src);
                    }
                    for (int a = 1; a <= 10; a++)
                    {
                        src = erosion.Apply(src);
                    }

                    ProgressBar.Value += 1;
                    NoiseBox.Image     = src;

                    //Blob
                    // Crop to the biggest blob; its top-left corner is kept in the
                    // fields x/y for the merge step below.
                    try
                    {
                        ExtractBiggestBlob blob = new ExtractBiggestBlob();
                        src = blob.Apply(src);
                        x   = blob.BlobPosition.X;
                        y   = blob.BlobPosition.Y;
                        ProgressBar.Value += 1;
                    }
                    catch
                    {
                        // NOTE(review): blob-extraction failures (e.g. no blob found)
                        // are swallowed and the pipeline continues with stale x/y —
                        // confirm this is the intended best-effort behavior.
                        this.Show();
                        //MessageBox.Show("Lightning conditions are not good for detecting the gestures", "Bad Lights", MessageBoxButtons.OK, MessageBoxIcon.Information);
                    }

                    //Merge
                    // Re-apply the original colors inside the blob region: both
                    // bitmaps are locked and Merge_process runs per quadrant in
                    // parallel over the raw byte buffers.
                    Bitmap srcImage = new Bitmap(CapturedBox.Image);
                    Bitmap dstImage = new Bitmap(src);
                    var    srcrect  = new Rectangle(0, 0, srcImage.Width, srcImage.Height);
                    var    dstrect  = new Rectangle(0, 0, dstImage.Width, dstImage.Height);
                    var    srcdata  = srcImage.LockBits(srcrect, ImageLockMode.ReadWrite, srcImage.PixelFormat);
                    var    dstdata  = dstImage.LockBits(dstrect, ImageLockMode.ReadWrite, dstImage.PixelFormat);
                    var    srcdepth = Bitmap.GetPixelFormatSize(srcdata.PixelFormat) / 8;
                    var    dstdepth = Bitmap.GetPixelFormatSize(dstdata.PixelFormat) / 8;
                    //bytes per pixel
                    var srcbuffer = new byte[srcdata.Width * srcdata.Height * srcdepth];
                    var dstbuffer = new byte[dstdata.Width * dstdata.Height * dstdepth];
                    //copy pixels to buffer
                    Marshal.Copy(srcdata.Scan0, srcbuffer, 0, srcbuffer.Length);
                    Marshal.Copy(dstdata.Scan0, dstbuffer, 0, dstbuffer.Length);

                    System.Threading.Tasks.Parallel.Invoke(
                        () =>
                    {
                        //upper-left
                        Merge_process(srcbuffer, dstbuffer, x, 0, y, 0, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height / 2), dstdata.Height / 2, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                    },
                        () =>
                    {
                        //upper-right
                        Merge_process(srcbuffer, dstbuffer, x + (dstdata.Width / 2), dstdata.Width / 2, y, 0, x + (dstdata.Width), dstdata.Width, y + (dstdata.Height / 2), dstdata.Height / 2, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                    },
                        () =>
                    {
                        //lower-left
                        Merge_process(srcbuffer, dstbuffer, x, 0, y + (dstdata.Height / 2), dstdata.Height / 2, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height), dstdata.Height, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                    },
                        () =>
                    {
                        //lower-right
                        Merge_process(srcbuffer, dstbuffer, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height / 2), dstdata.Height / 2, x + (dstdata.Width), dstdata.Width, y + (dstdata.Height), dstdata.Height, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                    }
                        );

                    //Copy the buffer back to image
                    Marshal.Copy(srcbuffer, 0, srcdata.Scan0, srcbuffer.Length);
                    Marshal.Copy(dstbuffer, 0, dstdata.Scan0, dstbuffer.Length);
                    srcImage.UnlockBits(srcdata);
                    dstImage.UnlockBits(dstdata);
                    src = dstImage;
                    ProgressBar.Value += 1;
                    CropBox.Image      = src;

                    //Resize
                    // Fixed 200x200 canvas so the 6x6 HOEF grid has uniform tiles.
                    ResizeBilinear resize = new ResizeBilinear(200, 200);
                    src = resize.Apply(src);
                    ProgressBar.Value += 1;

                    //Edges
                    src = Grayscale.CommonAlgorithms.BT709.Apply((Bitmap)src);
                    SobelEdgeDetector edges = new SobelEdgeDetector();
                    src = edges.Apply(src);
                    ProgressBar.Value    += 1;
                    EdgeDetectorBox.Image = src;

                    //HOEF
                    // Histogram of Oriented Edge Frequency: count edge pixels in each
                    // cell of a 6x6 grid (bins 1..36; index 0 unused), then normalize.
                    // NOTE(review): the loop variables x/y below shadow the blob
                    // position fields x/y assigned earlier.
                    Bitmap   block      = new Bitmap(src);
                    int[]    edgescount = new int[50];
                    double[] norm       = new double[200];
                    String   text       = null;
                    int      sum        = 0;
                    int      z          = 1;
                    for (int p = 1; p <= 6; p++)
                    {
                        for (int q = 1; q <= 6; q++)
                        {
                            for (int x = (p - 1) * block.Width / 6; x < (p * block.Width / 6); x++)
                            {
                                for (int y = (q - 1) * block.Height / 6; y < (q * block.Height / 6); y++)
                                {
                                    Color colorPixel = block.GetPixel(x, y);

                                    int r = colorPixel.R;
                                    int g = colorPixel.G;
                                    int b = colorPixel.B;

                                    // Any non-black pixel counts as an edge pixel.
                                    if (r != 0 & g != 0 & b != 0)
                                    {
                                        edgescount[z]++;
                                    }
                                }
                            }
                            z++;
                        }
                    }

                    for (z = 1; z <= 36; z++)
                    {
                        sum = sum + edgescount[z];
                    }
                    // Build the libsvm feature string " 1:v1 2:v2 ... 36:v36".
                    for (z = 1; z <= 36; z++)
                    {
                        norm[z] = (double)edgescount[z] / sum;
                        text    = text + " " + z.ToString() + ":" + norm[z].ToString();
                    }

                    if (training == 1)
                    {
                        // Training: accumulate labelled vectors.
                        File.AppendAllText(@"D:\train.txt", label.ToString() + text + Environment.NewLine);
                        ProgressBar.Value += 1;
                    }
                    else
                    {
                        // Recognition: overwrite the single-row test file.
                        File.WriteAllText(@"D:\test.txt", label.ToString() + text + Environment.NewLine);
                        ProgressBar.Value += 1;

                        //SVM
                        // Retrain from the full training file on every call, then
                        // classify the one test vector into D:\result.txt.
                        Problem   train     = Problem.Read(@"D:\train.txt");
                        Problem   test      = Problem.Read(@"D:\test.txt");
                        Parameter parameter = new Parameter()
                        {
                            C     = 32,
                            Gamma = 8
                        };
                        Model model = Training.Train(train, parameter);
                        Prediction.Predict(test, @"D:\result.txt", model, false);
                        int value1 = Convert.ToInt32(File.ReadAllText(@"D:\result.txt"));

                        // Map the predicted class: 27..29 are phrases, 0..25 map to
                        // letters 'A'..'Z' via ASCII offset 65.
                        String alphabet = null;
                        if (value1 == 27)
                        {
                            alphabet += "Welcome  ";
                        }

                        else if (value1 == 28)
                        {
                            alphabet += "Good Morning";
                        }
                        else if (value1 == 29)
                        {
                            alphabet += "Thank You";
                        }
                        else
                        {
                            alphabet += (char)(65 + value1);
                        }

                        // Show and speak the result.
                        OutputText.Text = alphabet;
                        SpeechSynthesizer speechSynthesizer = new SpeechSynthesizer();
                        speechSynthesizer.SetOutputToDefaultAudioDevice();
                        speechSynthesizer.Volume = 100;
                        speechSynthesizer.Rate   = -2;
                        speechSynthesizer.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Child);
                        speechSynthesizer.SpeakAsync(alphabet);

                        // NOTE(review): given the assignments above, 'alphabet' never
                        // equals a single space, so this branch looks unreachable —
                        // confirm before removing.
                        if (alphabet == " ")
                        {
                            speechSynthesizer.SpeakAsync(OutputText.Text);
                        }
                        ProgressBar.Value += 1;
                    }
                }
                else
                {
                    // No skin detected: just bring the form back to front.
                    this.Show();
                }
                watch.Stop();
                var   time = (watch.ElapsedMilliseconds);
                float secs = (float)time / 1000;
                ExecutionTimeBox.Text = Convert.ToString(secs) + " " + "Seconds";
            }
        }
コード例 #27
0
        /// <summary>
        /// Classifies the current camera frame: skin segmentation (4 quadrants in
        /// parallel), grayscale + dilatation + erosion cleanup, biggest-blob crop,
        /// color merge, Canny edges, then an 8x8 HOEF histogram (64 bins) written in
        /// libsvm format to datasets\testalpha.txt. The SVM is trained from
        /// datasets\trainedset.txt and the predicted class is shown as a letter.
        /// </summary>
        private void button2_Click(object sender, EventArgs e)
        {
            // Grab a fresh frame only while the camera is running; otherwise reuse
            // whatever is already in pictureBox2.
            if (FinalFrame.IsRunning == true)
            {
                pictureBox2.Image = (Bitmap)pictureBox1.Image.Clone();
            }
            // Lock the frame's pixels for in-place skin detection.
            Bitmap     InputImage = (Bitmap)pictureBox2.Image;
            Rectangle  Tile       = new Rectangle(0, 0, InputImage.Width, InputImage.Height);
            BitmapData bitmapdata = InputImage.LockBits(Tile, ImageLockMode.ReadWrite, InputImage.PixelFormat);
            int        formatsize = Bitmap.GetPixelFormatSize(bitmapdata.PixelFormat) / 8;
            var        tempreg    = new byte[bitmapdata.Width * bitmapdata.Height * formatsize];

            Marshal.Copy(bitmapdata.Scan0, tempreg, 0, tempreg.Length);

            // Skin-detect the four quadrants in parallel over the shared buffer.
            System.Threading.Tasks.Parallel.Invoke(
                () =>
            {
                multithread1(tempreg, 0, 0, bitmapdata.Width / 2, bitmapdata.Height / 2, bitmapdata.Width, formatsize);
            },
                () =>
            {
                multithread1(tempreg, 0, bitmapdata.Height / 2, bitmapdata.Width / 2, bitmapdata.Height, bitmapdata.Width, formatsize);
            },
                () =>
            {
                multithread1(tempreg, bitmapdata.Width / 2, 0, bitmapdata.Width, bitmapdata.Height / 2, bitmapdata.Width, formatsize);
            },
                () =>
            {
                multithread1(tempreg, bitmapdata.Width / 2, bitmapdata.Height / 2, bitmapdata.Width, bitmapdata.Height, bitmapdata.Width, formatsize);
            }
                );

            Marshal.Copy(tempreg, 0, bitmapdata.Scan0, tempreg.Length);
            InputImage.UnlockBits(bitmapdata);

            // Morphological cleanup of the skin mask (0.2125/0.7154/0.0721 are the
            // BT.709 luminance coefficients).
            Grayscale  grayfilter   = new Grayscale(0.2125, 0.7154, 0.0721);//GrayscaleBT709 grayfilter=new GrayscaleBT709();
            Dilatation dilatefilter = new Dilatation();
            Erosion    erodefilter  = new Erosion();

            InputImage = grayfilter.Apply((Bitmap)InputImage);
            InputImage = dilatefilter.Apply((Bitmap)InputImage);
            InputImage = erodefilter.Apply((Bitmap)InputImage);
            //Opening openfilter = new Opening();
            //InputImage=openfilter.Apply((Bitmap)InputImage);
            //Closing closefilter = new Closing();
            //InputImage=closefilter.Apply((Bitmap)InputImage);

            // Crop to the biggest blob; its top-left corner anchors the merge below.
            ExtractBiggestBlob blob = new ExtractBiggestBlob();

            InputImage = blob.Apply(InputImage);
            int cordx = blob.BlobPosition.X;
            int cordy = blob.BlobPosition.Y;

            // Merge the original colors back into the blob region: both bitmaps are
            // locked and multithread2 runs per quadrant over the raw byte buffers.
            Bitmap source               = new Bitmap(pictureBox1.Image);
            Bitmap destination          = new Bitmap(InputImage);
            var    sourcerectangle      = new Rectangle(0, 0, source.Width, source.Height);
            var    destinationrectangle = new Rectangle(0, 0, destination.Width, destination.Height);
            var    sourcedata           = source.LockBits(sourcerectangle, ImageLockMode.ReadWrite, source.PixelFormat);
            var    destinationdata      = destination.LockBits(destinationrectangle, ImageLockMode.ReadWrite, destination.PixelFormat);
            var    sourcedepth          = Bitmap.GetPixelFormatSize(sourcedata.PixelFormat) / 8;
            var    destinationdepth     = Bitmap.GetPixelFormatSize(destinationdata.PixelFormat) / 8;
            var    source1              = new byte[sourcedata.Width * sourcedata.Height * sourcedepth];
            var    destination1         = new byte[destinationdata.Width * destinationdata.Height * destinationdepth];

            Marshal.Copy(sourcedata.Scan0, source1, 0, source1.Length);
            Marshal.Copy(destinationdata.Scan0, destination1, 0, destination1.Length);

            System.Threading.Tasks.Parallel.Invoke(
                () =>
            {
                multithread2(source1, destination1, cordx, 0, cordy, 0, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth);
            },
                () =>
            {
                multithread2(source1, destination1, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy, 0, cordx + (destinationdata.Width), destinationdata.Width, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth);
            },
                () =>
            {
                multithread2(source1, destination1, cordx, 0, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy + (destinationdata.Height), destinationdata.Height, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth);
            },
                () =>
            {
                multithread2(source1, destination1, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, cordx + (destinationdata.Width), destinationdata.Width, cordy + (destinationdata.Height), destinationdata.Height, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth);
            }
                );

            Marshal.Copy(source1, 0, sourcedata.Scan0, source1.Length);
            Marshal.Copy(destination1, 0, destinationdata.Scan0, destination1.Length);
            source.UnlockBits(sourcedata);
            destination.UnlockBits(destinationdata);
            InputImage = destination;

            // Edge image for the feature histogram.
            InputImage = grayfilter.Apply((Bitmap)InputImage);
            CannyEdgeDetector edgesoutline = new CannyEdgeDetector();

            InputImage        = edgesoutline.Apply(InputImage);
            pictureBox2.Image = InputImage;

            Bitmap blocks = new Bitmap(InputImage);

            // HOEF over an 8x8 grid: bins 1..64 (index 0 unused) count non-black
            // (edge) pixels per cell, then get normalized by the total.
            int[]    numofedges = new int[100];
            double[] normalized = new double[400];
            String   alphabet   = null;
            int      total      = 0;
            int      sq         = 1;

            for (int p = 1; p <= 8; p++)
            {
                for (int q = 1; q <= 8; q++)
                {
                    for (int x = (p - 1) * blocks.Width / 8; x < (p * blocks.Width / 8); x++)
                    {
                        for (int y = (q - 1) * blocks.Height / 8; y < (q * blocks.Height / 8); y++)
                        {
                            Color colorPixel = blocks.GetPixel(x, y);

                            int r = colorPixel.R;
                            int g = colorPixel.G;
                            int b = colorPixel.B;

                            if (r != 0 & g != 0 & b != 0)
                            {
                                numofedges[sq]++;
                            }
                        }
                    }
                    sq++;
                }
            }

            for (sq = 1; sq <= 64; sq++)
            {
                total = total + numofedges[sq];
            }
            // Build the libsvm feature string " 1:v1 2:v2 ... 64:v64".
            for (sq = 1; sq <= 64; sq++)
            {
                normalized[sq] = (double)numofedges[sq] / total;
                alphabet       = alphabet + " " + sq.ToString() + ":" + normalized[sq].ToString();
            }
            // 'label' is presumably a class-label field set elsewhere — TODO confirm
            // (it only matters as the leading token of the single-row test file).
            File.WriteAllText(@"datasets\testalpha.txt", label.ToString() + alphabet + Environment.NewLine);

            // Train from the stored set and classify the one test vector.
            Problem   train     = Problem.Read(@"datasets\trainedset.txt");
            Problem   test      = Problem.Read(@"datasets\testalpha.txt");
            Parameter parameter = new Parameter();

            parameter.C     = 32;
            parameter.Gamma = 8;
            Model model = Training.Train(train, parameter);

            Prediction.Predict(test, @"datasets\result.txt", model, false);
            // Map the predicted class index 0..25 to a letter 'A'..'Z'.
            int    value = Convert.ToInt32(File.ReadAllText(@"datasets\result.txt"));
            String res   = null;

            res         = res + (char)(value + 65);
            label1.Text = res;
        }
コード例 #28
0
        /// <summary>
        /// Trains an SVM classifier from the unambiguously labelled colony characteristics,
        /// then classifies every colony with the trained model, writing the predictions
        /// to <c>D:\result.txt</c>. Shows a message box on completion or when no data exists.
        /// </summary>
        /// <param name="Characteristics">Colony features keyed by integer index (assumed 1-based).</param>
        public void svmproject(Dictionary <int, Characteristic> Characteristics)
        {
            // NOTE(review): both loops run i = 1 .. Count-1, which skips the entry keyed
            // `Count` if the dictionary is keyed 1..Count — confirm the caller's key scheme.
            Dictionary <int, Characteristic> _characteristics = Characteristics;

            // Library defaults (C_SVC, RBF kernel, ...); C and Gamma are filled in by grid search below.
            Parameter parameters = new Parameter();

            // Build the training set in libsvm text format: "<label> 1:Area 2:Ratio 3:R 4:G 5:B".
            // Only unambiguous samples (exactly one of the two flags set) are used for training.
            StringBuilder trainText = new StringBuilder();
            for (int i = 1; i < _characteristics.Count; i++)
            {
                Characteristic c = _characteristics[i];
                if (c.IsQualifiedColony == false && c.IsInvalidColony == true)
                {
                    AppendSvmSample(trainText, "0", c);
                }
                else if (c.IsQualifiedColony == true && c.IsInvalidColony == false)
                {
                    AppendSvmSample(trainText, "1", c);
                }
            }

            if (trainText.Length == 0)
            {
                // BUG FIX: the original set model/range to null here but fell through and
                // later called Scaling.Scale / Prediction.Predict with them, throwing a
                // NullReferenceException. Bail out instead.
                MessageBox.Show("无学习数据");
                return;
            }

            Model          model;
            RangeTransform range;
            // using-statement guarantees the stream is released even if training throws.
            using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(trainText.ToString())))
            {
                Problem train = Problem.Read(stream);
                range = Scaling.DetermineRange(train);
                train = Scaling.Scale(train, range);

                // BUG FIX: @"D:\\parameters.txt" is a *verbatim* string, so the original
                // passed a literal double backslash in the path; a single one is intended.
                double C;
                double gamma;
                ParameterSelection.Grid(train, parameters, @"D:\parameters.txt", out C, out gamma);
                parameters.C     = C;
                parameters.Gamma = gamma;

                model = Training.Train(train, parameters);
            }

            // Build the prediction set: every colony, with a dummy label of "0".
            StringBuilder predictText = new StringBuilder();
            for (int i = 1; i < _characteristics.Count; i++)
            {
                AppendSvmSample(predictText, "0", _characteristics[i]);
            }

            if (predictText.Length == 0)
            {
                MessageBox.Show("无筛选数据");
                return;
            }

            using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(predictText.ToString())))
            {
                Problem pre = Problem.Read(stream);
                // Scale the prediction set with the range learned from the training set.
                pre = Scaling.Scale(pre, range);
                Prediction.Predict(pre, @"D:\result.txt", model, false);
                MessageBox.Show("筛选完毕");
            }
        }

        /// <summary>
        /// Appends one libsvm-format sample line ("&lt;label&gt; 1:Area 2:Ratio 3:R 4:G 5:B\r\n")
        /// for the given colony to <paramref name="sb"/>.
        /// </summary>
        private static void AppendSvmSample(StringBuilder sb, string label, Characteristic c)
        {
            sb.Append(label)
              .Append(" 1:").Append(c.Area.ToString())
              .Append(" 2:").Append(c.MajToMinAxisRatio.ToString())
              .Append(" 3:").Append(c.CentreAcerageColor.R.ToString())
              .Append(" 4:").Append(c.CentreAcerageColor.G.ToString())
              .Append(" 5:").Append(c.CentreAcerageColor.B.ToString())
              .Append("\r\n");
        }