Example #1
0
        /// <summary>
        /// Predicts class labels for the given samples using the trained classifier.
        /// </summary>
        /// <param name="predictionData">Samples whose <c>Features</c> vectors are classified.</param>
        /// <returns>One predicted label per sample, or null for an unknown classifier type.</returns>
        /// <exception cref="InvalidOperationException">Thrown when Train has not been called yet.</exception>
        public int[] Predict(List <TrainingValue> predictionData)
        {
            if (!Trained)
            {
                // InvalidOperationException is more specific than the bare Exception
                // the original threw; callers catching Exception still catch it.
                throw new InvalidOperationException("Train must be called first!");
            }

            // Flatten the feature vectors into the jagged array the classifiers expect.
            double[][] featuresArray = new double[predictionData.Count][];
            for (int i = 0; i < featuresArray.Length; i++)
            {
                featuresArray[i] = predictionData[i].Features;
            }

            switch (type)
            {
            case ClassifierType.DecisionTree:
                return(tree.Decide(featuresArray));

            case ClassifierType.LDA:
                return(pipeline.Decide(featuresArray));

            case ClassifierType.SVM:
                // The SVM decides booleans; convert to the integer labels used elsewhere.
                return(convertBoolArray(svm.Decide(featuresArray)));

            case ClassifierType.Bayes:
                return(bayes.Decide(featuresArray));
            }

            // Unknown classifier type — original behavior of returning null preserved.
            return(null);
        }
Example #2
0
        /// <summary>
        /// Trains a Gaussian naive Bayes model on a small three-class dataset and
        /// verifies it reproduces the training labels exactly.
        /// </summary>
        public void learn_test()
        {
            #region doc_learn
            // Fifteen four-feature samples, five per class (0, 1 and 2):
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            // The class label for each of the samples above:
            int[] outputs =
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Gaussian-distribution naive Bayes learner; a tiny regularization
            // term keeps constant features from producing zero variances.
            var teacher = new NaiveBayesLearning <NormalDistribution>();
            teacher.Options.InnerOption = new NormalOptions
            {
                Regularization = 1e-5
            };

            // Fit the model, then evaluate it on the training data itself.
            NaiveBayes <NormalDistribution> bayes = teacher.Learn(inputs, outputs);
            int[] predicted = bayes.Decide(inputs);

            // Training error should come out as exactly zero:
            double error = new ZeroOneLoss(outputs).Loss(predicted);

            // Single-sample query; this vector belongs to class 1:
            int answer = bayes.Decide(new double[] { 1, 0, 0, 1 });
            #endregion

            Assert.AreEqual(0, error);
            Assert.AreEqual(1, answer);
            Assert.IsTrue(predicted.IsEqual(outputs));
        }
        /// <summary>
        /// Scores the trained machine against known labels.
        /// </summary>
        /// <param name="observations">Feature vectors to classify.</param>
        /// <param name="labels">Ground-truth label for each observation.</param>
        /// <returns>The complement of the accuracy loss over the set.</returns>
        public double Predict(double[][] observations, int[] labels)
        {
            // Classify every observation with the current model.
            int[] decisions = machine.Decide(observations);

            // AccuracyLoss yields a loss value; report its complement.
            double loss = new AccuracyLoss(labels).Loss(decisions);
            return 1 - loss;
        }
Example #4
0
        /// <summary>
        /// Trains a discrete naive Bayes model on three-class symbolic data and
        /// verifies that it reproduces the training labels exactly.
        /// </summary>
        public void ComputeTest3()
        {
            #region doc_multiclass
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            int[][] inputs =
            {
                //               input      output
                new int[] { 0, 1, 1, 0 }, //  0
                new int[] { 0, 1, 0, 0 }, //  0
                new int[] { 0, 0, 1, 0 }, //  0
                new int[] { 0, 1, 1, 0 }, //  0
                new int[] { 0, 1, 0, 0 }, //  0
                new int[] { 1, 0, 0, 0 }, //  1
                new int[] { 1, 0, 0, 0 }, //  1
                new int[] { 1, 0, 0, 1 }, //  1
                new int[] { 0, 0, 0, 1 }, //  1
                new int[] { 0, 0, 0, 1 }, //  1
                new int[] { 1, 1, 1, 1 }, //  2
                new int[] { 1, 0, 1, 1 }, //  2
                new int[] { 1, 1, 0, 1 }, //  2
                new int[] { 0, 1, 1, 1 }, //  2
                new int[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Let us create a learning algorithm
            var learner = new NaiveBayesLearning();

            // and teach a model on the data examples
            NaiveBayes nb = learner.Learn(inputs, outputs);

            // Now, let's test  the model output for the first input sample:
            // (this vector is training sample 0, labeled class 0, and the
            // zero-error assertion below proves predictions match the labels)
            int answer = nb.Decide(new int[] { 0, 1, 1, 0 }); // should be 0
            #endregion

            // Training error must be exactly zero for this separable dataset.
            double error = new ZeroOneLoss(outputs).Loss(nb.Decide(inputs));
            Assert.AreEqual(0, error);

            // The legacy Compute API also returns the predicted class label,
            // which must equal the expected output for every training sample.
            for (int i = 0; i < inputs.Length; i++)
            {
                error = nb.Compute(inputs[i]);
                double expected = outputs[i];
                Assert.AreEqual(expected, error);
            }
        }
Example #5
0
        /// <summary>
        /// Evaluates the naive Bayes model on a test set and builds the
        /// resulting confusion matrix.
        /// </summary>
        /// <param name="nb">Trained Gaussian naive Bayes model.</param>
        /// <param name="x_test">Test feature vectors.</param>
        /// <param name="y_expected">Ground-truth labels for the test set.</param>
        /// <param name="y_predicted">Receives the model's predictions.</param>
        /// <returns>The confusion matrix of expected vs. predicted labels.</returns>
        private GeneralConfusionMatrix Test(NaiveBayes <NormalDistribution> nb, double[][] x_test, int[] y_expected, out int[] y_predicted)
        {
            // Predict labels for the whole test set in a single call.
            y_predicted = nb.Decide(x_test);

            return new GeneralConfusionMatrix(y_expected, y_predicted);
        }
        /// <summary>
        /// Encodes the given attribute strings, classifies the instance, and
        /// prints the decoded class plus the per-class probabilities.
        /// </summary>
        /// <param name="args">Attribute values in codebook column order.</param>
        public void Predict(params string[] args)
        {
            int[] instance;
            try
            {
                instance = codeBook.Transform(args);
            }
            catch (Exception e)
            {
                // Unknown symbols cannot be encoded; report and bail out.
                System.Console.WriteLine(e.Message);
                return;
            }

            // Decide the class and translate it back to its string form.
            int decision = nativeBayes.Decide(instance);
            string result = codeBook.Revert(headerToPredict, decision);
            System.Console.WriteLine(result);

            // Also show the probability assigned to each possible class.
            foreach (double probability in nativeBayes.Probabilities(instance))
            {
                System.Console.WriteLine(probability);
            }
        }
        /// <summary>
        /// Trains a discrete naive Bayes classifier on Mitchell's "play tennis"
        /// dataset given as raw strings (no DataTable) and checks its answer
        /// and probabilities for one new day.
        /// </summary>
        public void learn_no_datatable()
        {
            #region doc_mitchell_no_datatable
            // Mitchell's classic "play tennis" dataset as raw string rows:
            string[] columnNames = { "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis" };

            string[][] data =
            {
                new string[] { "Sunny",    "Hot",  "High",   "Weak",   "No"  },
                new string[] { "Sunny",    "Hot",  "High",   "Strong", "No"  },
                new string[] { "Overcast", "Hot",  "High",   "Weak",   "Yes" },
                new string[] { "Rain",     "Mild", "High",   "Weak",   "Yes" },
                new string[] { "Rain",     "Cool", "Normal", "Weak",   "Yes" },
                new string[] { "Rain",     "Cool", "Normal", "Strong", "No"  },
                new string[] { "Overcast", "Cool", "Normal", "Strong", "Yes" },
                new string[] { "Sunny",    "Mild", "High",   "Weak",   "No"  },
                new string[] { "Sunny",    "Cool", "Normal", "Weak",   "Yes" },
                new string[] { "Rain",     "Mild", "Normal", "Weak",   "Yes" },
                new string[] { "Sunny",    "Mild", "Normal", "Strong", "Yes" },
                new string[] { "Overcast", "Mild", "High",   "Strong", "Yes" },
                new string[] { "Overcast", "Hot",  "Normal", "Weak",   "Yes" },
                new string[] { "Rain",     "Mild", "High",   "Strong", "No"  },
            };

            // Codification maps each string symbol to a discrete integer code.
            Codification codebook = new Codification(columnNames, data);

            // Encode the table, then split it into inputs (all columns except
            // the last) and outputs (the last column only).
            int[][] symbols = codebook.Transform(data);
            int[][] inputs  = symbols.Get(null, 0, -1);
            int[]   outputs = symbols.GetColumn(-1);

            // Train a discrete naive Bayes classifier on the encoded pairs.
            var learner = new NaiveBayesLearning();
            NaiveBayes nb = learner.Learn(inputs, outputs);

            // Encode a new day: sunny, cool, humid and windy.
            int[] instance = codebook.Translate("Sunny", "Cool", "High", "Strong");

            // Numeric decision and its human-readable translation.
            int c = nb.Decide(instance); // answer will be 0
            string result = codebook.Translate("PlayTennis", c); // answer will be "No"

            // Probabilities for each possible answer.
            double[] probs = nb.Probabilities(instance); // { 0.795, 0.205 }
            #endregion

            Assert.AreEqual("No", result);
            Assert.AreEqual(0, c);
            Assert.AreEqual(0.795, probs[0], 1e-3);
            Assert.AreEqual(0.205, probs[1], 1e-3);
            Assert.AreEqual(1, probs.Sum(), 1e-10);
            Assert.IsFalse(double.IsNaN(probs[0]));
            Assert.AreEqual(2, probs.Length);
        }
Example #8
0
        /// <summary>
        /// Runs the full sentiment pipeline (preprocess, featurize, classify)
        /// on a single text and packages the scores and probabilities.
        /// </summary>
        /// <param name="sentiment">Raw text to analyze.</param>
        /// <returns>Polarity plus per-class scores and probabilities.</returns>
        public SentimentResult Predict(string sentiment)
        {
            // Tokenize the text and map it into the bag-of-words feature space.
            var tokens   = _preprocessor.Process(sentiment);
            var features = _bagOfWords.Transform(tokens).ToInt32();

            var scores        = _bayes.Scores(features);
            var probabilities = _bayes.Probabilities(features);

            // Class 0 is negative; anything else is positive.
            return new SentimentResult
            {
                Polarity            = _bayes.Decide(features) == 0 ? Polarity.Negative : Polarity.Positive,
                NegativeScore       = scores[0],
                PositiveScore       = scores[1],
                NegativeProbability = probabilities[0],
                PositiveProbability = probabilities[1],
            };
        }
Example #9
0
        /// <summary>
        /// Classifies one instance and reports whether the prediction matches
        /// the expected class. Index 0 of the vector holds the encoded label
        /// (0 -> class 0, 0.5 -> class 1, anything else -> class 2); the
        /// remaining entries are the feature values.
        /// </summary>
        /// <param name="instance">Label-prefixed feature vector.</param>
        /// <returns>True when the model predicts the encoded class.</returns>
        public bool Test(Vector <double> instance)
        {
            // Copy everything after the leading label into a plain array.
            double[] features = new double[instance.Count - 1];
            for (int i = 0; i < features.Length; i++)
            {
                features[i] = instance[i + 1];
            }

            double label = instance[0];
            int expected = label == 0 ? 0 : label == 0.5 ? 1 : 2;

            return bayes.Decide(features) == expected;
        }
        /// <summary>
        /// Computes the classifier's outputs for the grid data, shows a
        /// confusion matrix, and draws a result scatter plot.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event arguments (unused).</param>
        private void btnTestingRun_Click(object sender, EventArgs e)
        {
            if (bayes == null || dgvTestingSource?.DataSource == null)
            {
                MessageBox.Show("Please create a classifier first.");
                return;
            }

            // NOTE(review): the guard above checks dgvTestingSource but the data
            // below comes from dgvLearningSource — confirm which grid is intended.
            // Guard before dereferencing: the original cast the DataSource and
            // called ToMatrix() unconditionally, which throws a
            // NullReferenceException when no DataTable is bound; its later
            // "table != null" check ran only after that dereference.
            if (!(dgvLearningSource?.DataSource is DataTable sourceTable))
            {
                return;
            }

            // Creates a matrix from the source data table
            double[,] table = sourceTable.ToMatrix();

            // Get only the input vector values (first two columns)
            double[][] inputs = table.Get(null, 0, 2).ToJagged();

            // Get only the label outputs (third column)
            int[] expected = new int[table.GetLength(0)];
            for (int i = 0; i < expected.Length; i++)
            {
                expected[i] = (int)table[i, 2];
            }

            // Compute the machine outputs
            int[] output = bayes.Decide(inputs);

            // Use confusion matrix to compute some statistics.
            ConfusionMatrix confusionMatrix = new ConfusionMatrix(output, expected, 1, 0);
            if (dgvPerformance != null)
            {
                dgvPerformance.DataSource = new List <ConfusionMatrix> {
                    confusionMatrix
                };

                foreach (DataGridViewColumn col in dgvPerformance.Columns)
                {
                    col.Visible = true;
                }
            }

            if (Column1 != null && Column2 != null)
            {
                Column1.Visible = Column2.Visible = false;

                // Create performance scatter plot
                CreateResultScatterplot(zedGraphControl1, inputs, expected.ToDouble(), output.ToDouble());
            }
        }
        /// <summary>
        /// Maps a user's activity statistics onto a UserType via the trained
        /// naive Bayes model.
        /// </summary>
        /// <param name="statistics">Activity numbers for one user.</param>
        /// <returns>The predicted user type.</returns>
        public UserType ClassifyUser(UserStatistics statistics)
        {
            // Feature order must match the order used during training.
            double[] features =
            {
                statistics.AccountAge,
                statistics.CommentsCount,
                statistics.DailyLoginsInRow,
                statistics.MoneySpent,
            };

            return (UserType)_naiveBayes.Decide(features);
        }
Example #12
0
        /// <summary>
        /// Asserts the expected decision, probabilities, and fitted frequency
        /// tables for a two-class discrete naive Bayes model.
        /// </summary>
        /// <param name="nb">Model trained by the calling test.</param>
        /// <param name="inputs">All training points, re-evaluated below.</param>
        /// <param name="sp">A single query point expected to fall in class 1.</param>
        private static void test(NaiveBayes <GeneralDiscreteDistribution, int> nb, int[][] inputs, int[] sp)
        {
            int c = nb.Decide(sp);             // 1

            double[] p = nb.Probabilities(sp); // 0.015, 0.985

            // Evaluation of all points
            int[] actual = nb.Decide(inputs);

            Assert.AreEqual(1, c);
            Assert.AreEqual(0.015197568389057824, p[0], 1e-10);
            Assert.AreEqual(0.98480243161094227, p[1], 1e-10);

            // Fitted per-feature frequency tables for class 0:
            Assert.AreEqual(nb.Distributions[0].Components[0].Frequencies[0], 0.46153846153846156);
            Assert.AreEqual(nb.Distributions[0].Components[1].Frequencies[0], 0.23076923076923078);
            Assert.AreEqual(nb.Distributions[0].Components[2].Frequencies[0], 0.15384615384615385);
            Assert.AreEqual(nb.Distributions[0].Components[3].Frequencies[0], 0.38461538461538464);
            Assert.AreEqual(nb.Distributions[0].Components[4].Frequencies[0], 0.23076923076923078);
            Assert.AreEqual(nb.Distributions[0].Components[5].Frequencies[0], 0.92307692307692313);
            Assert.AreEqual(nb.Distributions[0].Components[6].Frequencies[0], 0.92307692307692313);
            Assert.AreEqual(nb.Distributions[0].Components[7].Frequencies[0], 0.53846153846153844);

            // Fitted per-feature frequency tables for class 1:
            Assert.AreEqual(nb.Distributions[1].Components[0].Frequencies[0], 0.46153846153846156);
            Assert.AreEqual(nb.Distributions[1].Components[1].Frequencies[0], 0.23076923076923078);
            Assert.AreEqual(nb.Distributions[1].Components[2].Frequencies[0], 0.61538461538461542);
            Assert.AreEqual(nb.Distributions[1].Components[3].Frequencies[0], 0.38461538461538464);
            Assert.AreEqual(nb.Distributions[1].Components[4].Frequencies[0], 0.92307692307692313);
            Assert.AreEqual(nb.Distributions[1].Components[5].Frequencies[0], 0.30769230769230771);
            Assert.AreEqual(nb.Distributions[1].Components[6].Frequencies[0], 0.30769230769230771);
            Assert.AreEqual(nb.Distributions[1].Components[7].Frequencies[0], 0.076923076923076927);

            // Points 11, 12 and 14..21 should be labelled 1; 0..9 and 13 labelled 0.
            int[] last   = actual.Get(new[] { 11, 12 }.Concatenate(Vector.Range(14, 22)));
            int[] others = actual.Get(Vector.Range(0, 10).Concatenate(13));
            Assert.IsTrue(1.IsEqual(last));
            Assert.IsTrue(0.IsEqual(others));
        }
        /// <summary>
        /// Analyzes an array of text using a Naive Bayes classifier and returns estimated political leaning
        /// </summary>
        /// <param name="texts">Array of texts to classify</param>
        /// <returns>Average of all political leanings for the texts array. 0.0 is conservative, 1.0 is liberal, 0.5 is moderate</returns>
        public float Analyze(string[] texts)
        {
            if (texts.Length == 0)
            {
                return DEFAULT_RANK;
            }

            // Same pipeline as training: tokenize, drop stop words, featurize.
            string[][] tokens = TrimStopWords(texts.Tokenize());
            double[][] features = bagOfWords.Transform(tokens);

            // Classify each text and average the labels when any came back.
            int[] leanings = nbClassifier.Decide(features);
            if (leanings.Length == 0)
            {
                return DEFAULT_RANK;
            }

            return (float)leanings.Average();
        }
Example #14
0
        /// <summary>
        /// Classifies each observation and maps the predicted label to a
        /// boolean: false for class 0, true for any other class.
        /// </summary>
        /// <param name="oinputs">Observations to classify.</param>
        /// <returns>One boolean answer per observation.</returns>
        public bool[] classify(double[][] oinputs)
        {
            // We can estimate class labels using the trained model.
            int[] predicted = nb.Decide(oinputs);

            // Direct comparison replaces the original per-element if/else.
            bool[] answers = new bool[predicted.Length];
            for (int i = 0; i < predicted.Length; i++)
            {
                answers[i] = predicted[i] != 0;
            }

            return answers;
        }
Example #15
0
        /// <summary>
        /// Trains a naive Bayes model on word/category pairs loaded from an
        /// Excel sheet, then classifies a single test word. Several lines look
        /// suspicious — see the NOTE(review) comments below.
        /// </summary>
        static void Main(string[] args)
        {
            DataTable data = new DataTable("Categories of words");

            // NOTE(review): DataColumnCollection.Add expects (name, Type);
            // passing the string "Word" as the second argument matches no such
            // overload — verify this call compiles/behaves as intended.
            data.Columns.Add("Category", "Word");
            List <InputData> words = ExcelDataProvider.GetData(@"C:\Users\Дарья\Desktop\AdverbNoun.xlsx", 0);

            // Fill the table with one row per (category, word) pair.
            foreach (var word in words)
            {
                data.Rows.Add(word.Category, word.Word);
            }

            // Codebook maps the string symbols to discrete integer codes.
            Codification codebook = new Codification(data, "Category", "Word");

            DataTable symbols = codebook.Apply(data);

            // Inputs come from "Category" and outputs from "Word" — presumably
            // the model predicts the word code from its category; TODO confirm.
            int[][] inputs  = symbols.ToJagged <int>("Category");
            int[]   outputs = symbols.ToArray <int>("Word");

            var        learner = new NaiveBayesLearning();
            NaiveBayes nb      = learner.Learn(inputs, outputs);

            // Load the test sheet into a fresh table (the table itself is not
            // used below — only the single "helpful" instance is classified).
            data = new DataTable("Categories of words");
            data.Columns.Add("Category", "Word");
            words = ExcelDataProvider.GetData(@"C:\Users\Дарья\Desktop\TestAdverbNoun.xlsx", 0);

            foreach (var word in words)
            {
                data.Rows.Add(word.Category, word.Word);
            }

            // Encode one test word and classify it.
            int[] instance = codebook.Translate("helpful");

            int c = nb.Decide(instance);

            string result = codebook.Translate("Category", c);

            double[] probs = nb.Probabilities(instance);

            // NOTE(review): prints the literal 0 — probably meant to print
            // `result` or `probs`; verify the intended output.
            Console.WriteLine(0);
        }
        /// <summary>
        /// Trains a Cauchy-distribution naive Bayes model on the observations
        /// and returns its accuracy on the same training data.
        /// </summary>
        /// <param name="observations">Training feature vectors.</param>
        /// <param name="labels">Ground-truth label for each observation.</param>
        /// <returns>The complement of the accuracy loss on the training set.</returns>
        public double Learn(double[][] observations, int[] labels)
        {
            var teacher = new NaiveBayesLearning <CauchyDistribution>();

            // Use the learning algorithm to fit the model.
            machine = teacher.Learn(observations, labels);

            // Score the model against its own training set.
            int[] predicted = machine.Decide(observations);
            double loss = new AccuracyLoss(labels).Loss(predicted);

            return 1 - loss;
        }
Example #17
0
        /// <summary>
        /// Trains a Gaussian naive Bayes model on the stored inputs/outputs and
        /// prints training-set accuracy statistics to the console.
        /// </summary>
        /// <param name="index">Optional iteration number to include in the report.</param>
        public void Learn(int?index)
        {
            var learner = new NaiveBayesLearning <NormalDistribution>();

            // A tiny regularization term avoids zero variances on constant features.
            learner.Options.InnerOption = new NormalOptions
            {
                Regularization = 1e-5
            };

            bayes = learner.Learn(inputs, outputs);
            int[] predicted = bayes.Decide(inputs);

            // Count hits on the training data; misses are the remainder.
            int correctCount = 0;
            for (int i = 0; i < outputs.Length; i++)
            {
                if (predicted[i] == outputs[i])
                {
                    correctCount++;
                }
            }
            int wrongCount = outputs.Length - correctCount;

            Console.WriteLine(DASHES);
            Console.WriteLine("Bajeso teorema");
            if (index != null)
            {
                Console.WriteLine("{0} iteracija", index);
            }
            Console.WriteLine(DASHES);
            Console.WriteLine("Apmokymo duomenys");
            Console.WriteLine("Teisingi: {0}", correctCount);
            Console.WriteLine("Teisingi procentais: {0}%", Math.Round((double)correctCount / outputs.Length * 100, 2));
            Console.WriteLine("Neteisingi: {0}", wrongCount);
            Console.WriteLine("Neteisingi procentais: {0}%", Math.Round((double)wrongCount / outputs.Length * 100, 2));
            Console.WriteLine(DASHES);
        }
        /// <summary>
        /// Predicts the "Percent" category for a news-type/company pair.
        /// Returns "-1" when the model is unavailable or the inputs cannot
        /// be encoded by the codebook.
        /// </summary>
        /// <param name="newsType">News type symbol known to the codebook.</param>
        /// <param name="companyId">Company identifier known to the codebook.</param>
        /// <returns>The decoded "Percent" value, or "-1" on any failure.</returns>
        public static string getNaiveBayesResult(string newsType, Guid companyId)
        {
            try
            {
                // Encode the symbolic inputs with the shared codebook.
                int[] info = codebook.Translate(new string[] { "NewsType", "CompanyId" }, new string[] { newsType.ToString(), companyId.ToString() });

                if (naiveBayes == null)
                {
                    // Touch the singleton so it initializes the classifier.
                    instance = AppNaiveBayes.Instance;
                }

                if (naiveBayes != null)
                {
                    int c = naiveBayes.Decide(info);
                    string result = codebook.Translate("Percent", c);
                    return(result);
                }

                return("-1");
            }
            catch (Exception)
            {
                // Deliberate best-effort: any encoding/prediction failure maps
                // to "-1". (Original declared an unused `e` — warning CS0168.)
                return("-1");
            }
        }
        /// <summary>
        /// Recommends posts for the logged-in client by training a naive Bayes
        /// model on (clientID -> genreID) pairs and showing posts from the
        /// predicted genre. Falls back to an empty list when there is no data
        /// for the current client.
        /// </summary>
        public ActionResult RecomendedPosts()
        {
            // Query for DatasetRecommndedPosts: every (client, post genre) pair.
            var query = (from u in db.Clients
                         join post in db.Posts on u.ID equals post.ClientID
                         select new DatasetRecommndedPosts
            {
                clientID = u.ID, generID = post.GenreID
            });

            DatasetRecommndedPosts[] datasetRecommndedPosts = query.ToArray();

            // No Posts yet
            if (datasetRecommndedPosts.Length == 0)
            {
                return(View(new List <Post>()));
            }

            // One Post: nothing to learn from — just show that genre's posts.
            if (datasetRecommndedPosts.Length == 1)
            {
                var post = db.Posts.Include(p => p.Client)
                           .Include(p => p.Genre)
                           .Where(p => p.GenreID == (datasetRecommndedPosts[0].generID));
                return(View(post));
            }

            // More then One post
            int numOfGener   = db.Genres.ToList().Count;
            int numOfClients = db.Clients.ToList().Count;

            // Create and fill the dataset
            int[][]    input  = new int[datasetRecommndedPosts.Length][]; /* ClientID */
            List <int> output = new List <int>();                         /* GenderID */

            for (int i = 0; i < datasetRecommndedPosts.Length; i++)
            {
                input[i] = new int[] { datasetRecommndedPosts[i].clientID };
                output.Add(datasetRecommndedPosts[i].generID);
            }

            // Use Naive Bayes for learning
            var bayes    = new NaiveBayes(numOfGener, new[] { numOfClients });
            var learning = new NaiveBayesLearning()
            {
                Model = bayes
            };

            // Mapper for Consecutive numbers - Naive bayes expect to receive consecutive data starts from 0
            Dictionary <int, int> inputMapper  = new Dictionary <int, int>();
            Dictionary <int, int> outputMapper = new Dictionary <int, int>();

            // Create mapper for input data: first occurrence of each client ID
            // gets the next consecutive key; repeats reuse the mapped value.
            int key = 0;

            for (int index = 0; index < input.Length; index++)
            {
                if (!inputMapper.ContainsKey(input[index][0]))
                {
                    inputMapper.Add(input[index][0], key);
                    input[index][0] = key;
                    key++;
                }
                else
                {
                    input[index][0] = inputMapper[input[index][0]];
                }
            }

            // Create mapper for output data (same scheme, applied to genre IDs)
            key = 0;
            for (int index = 0; index < output.Count; index++)
            {
                if (!outputMapper.ContainsKey(output[index]))
                {
                    outputMapper.Add(output[index], key);
                    output[index] = key;
                    key++;
                }
                else
                {
                    output[index] = outputMapper[output[index]];
                }
            }

            // If there is no posts for this user
            int currentClientID = ((Client)Session["Client"]).ID;

            if (!inputMapper.ContainsKey(currentClientID))
            {
                return(View(new List <Post>()));
            }

            // Naive bayes learning and decide for the current client's mapped ID
            learning.Learn(input, output.ToArray());
            int answerGenderID = bayes.Decide(new int[] { inputMapper[currentClientID] });

            // Gets the real GenderID by reversing the output mapper
            int mapAnswerGenderID = 0;

            foreach (var n in outputMapper)
            {
                if (n.Value == answerGenderID)
                {
                    mapAnswerGenderID = n.Key;
                }
            }

            // Gets the posts with recommended genderID
            var posts = db.Posts.Include(p => p.Client).Include(p => p.Genre).Where(p => p.GenreID == (mapAnswerGenderID));

            return(View(posts));
        }
        private void GenerateBasedOnData()
        {
            List <string[]> generating = new List <string[]>(); // do ewentualnego sprawdzania

            var attrType = RemoveAt(this.attrType, 0);

            //tutaj dorzucam tworzenie wykresu ciągłego prawdopodobieństwa
            Spline3Deg[,] probabilities = new Spline3Deg[classes, attribs];
            for (int i = 0; i < attribs; i++)
            {
                if (attrType[i].Equals("double") || attrType[i].Equals("integer"))
                {
                    for (int j = 0; j < classes; j++)
                    {
                        int      c    = values.ElementAt(j).Value.Item2.ElementAt(i).Value.Count;
                        double[] y, x = new double[c];
                        SortedList <double, int> temp = new SortedList <double, int>();
                        foreach (var v in values.ElementAt(j).Value.Item2.ElementAt(i).Value)
                        {
                            int tI = v.Value; double tD = Double.Parse(v.Key.Replace(" ", string.Empty),
                                                                       System.Globalization.NumberStyles.AllowDecimalPoint,
                                                                       System.Globalization.NumberFormatInfo.InvariantInfo);
                            temp.Add(tD, tI);
                        }
                        y    = temp.Keys.ToArray();
                        x[0] = 0;
                        for (int k = 1; k < temp.Count; k++)
                        {
                            x[k] = x[k - 1] + temp.ElementAt(k - 1).Value + temp.ElementAt(k).Value;
                        }
                        probabilities[j, i] = new Spline3Deg(x, y);
                    }
                }
            }


            //do sprawdzania punktacji później
            //podzielić dane wejściowe i wygenerowane na klasy i artybuty
            var readClass  = new int[reading.Count];
            var readAttr_d = new double[reading.Count, reading.ElementAt(0).Length - 1].ToJagged();

            var stringIntCheatSheet = new Dictionary <string, int> [reading.ElementAt(0).Length];

            for (int i = 0; i < stringIntCheatSheet.Length; i++)
            {
                stringIntCheatSheet[i] = new Dictionary <string, int>();
            }

            for (int x = 0; x < reading.Count; x++)
            {
                for (int y = 0; y < reading.ElementAt(0).Length; y++)
                {
                    double rr = 0;
                    string ss = reading.ElementAt(x)[y];
                    if (!double.TryParse(ss, System.Globalization.NumberStyles.AllowDecimalPoint,
                                         System.Globalization.NumberFormatInfo.InvariantInfo, out rr) ||
                        y == 0)
                    {
                        if (!stringIntCheatSheet[y].ContainsKey(ss))
                        {
                            stringIntCheatSheet[y].Add(ss, stringIntCheatSheet[y].Count);
                        }
                        rr = stringIntCheatSheet[y][ss];
                    }
                    if (y == 0)
                    {
                        readClass[x] = (int)rr;
                    }
                    else
                    {
                        readAttr_d[x][y - 1] = rr;
                    }
                }
            }
            int readClassesSqrt = (int)Math.Round(Math.Sqrt(reading.Count)),
                genClassesSqrt, mixClassesSqrt;
            var learnKnn = new KNearestNeighbors(readClassesSqrt);

            var knn = learnKnn.Learn(readAttr_d, readClass);

            double[] attrcr = new double[attribs];


            string[] bestattr = new string[attribs];
            double   bestscore;

            //czas generować ten szajs
            var newStuff = new string[newData, attribs + 1];

            for (int it = 0; it < newStuff.GetLength(0); it++)
            {
                bestscore = 0;

                int cl = rnd.Next(classes); //rnd to zadelkarowany wcześniej Random //losowanie klasy
                newStuff[it, 0] = values.ElementAt(cl).Key;
                int safety = 0;
                do
                {
                    for (int v = 1; v <= attribs; v++)
                    {     //losowanie wartości atrybutu
                        if (attrType[v - 1].Equals("string"))
                        { //funkcja dyskretna
                            int val = rnd.Next(values.ElementAt(cl).Value.Item1);
                            int b   = 0;
                            foreach (var a in values.ElementAt(cl).Value.Item2[v])
                            {
                                if (val < (b += a.Value))
                                {
                                    newStuff[it, v] = a.Key; //na Monte Carlo
                                    break;
                                }
                            }
                        }
                        else
                        {  //funkcja ciągła
                            Tuple <double, double> extr = probabilities[cl, v - 1].Limits();
                            double val = rnd.Next((int)extr.Item1, (int)extr.Item2) + rnd.NextDouble();
                            double r   = probabilities[cl, v - 1].y(val);
                            if (attrType[v - 1].Equals("double"))
                            {
                                newStuff[it, v] = r.ToString(fltPrec, System.Globalization.CultureInfo.InvariantCulture);
                            }
                            else //if (attrType[v - 1].Equals("integer"))
                            {
                                newStuff[it, v] = Math.Round(r).ToString();
                            }
                        }//koniec losowania wartości atrybutu
                        ///ekstra warunek bezpieczeństwa, bo czasami trafiają się NULLe
                        if (string.IsNullOrEmpty(newStuff[it, v]))
                        {
                            v--;
                            continue; //jeśli atrybut ma nulla, powtórz pętlę
                        }
                        ///koniec ekstra warunku bespieczeństwa
                    }//koniec generowania obiektu


                    //do tabliczki do sprawdzenia punktacji
                    for (int v = 1; v <= attribs; v++)
                    {
                        double rr = 0;
                        string ss = newStuff[it, v];
                        if (!double.TryParse(ss, System.Globalization.NumberStyles.AllowDecimalPoint,
                                             System.Globalization.NumberFormatInfo.InvariantInfo, out rr))
                        {
                            if (!stringIntCheatSheet[v].ContainsKey(ss))
                            {
                                stringIntCheatSheet[v].Add(ss, stringIntCheatSheet[v].Count);
                            }
                            rr = stringIntCheatSheet[v][ss];
                        }
                        attrcr[v - 1] = rr;
                    }
                    if (knn.Score(attrcr, cl) > bestscore)
                    {
                        for (int iter = 0; iter < attribs; iter++)
                        {
                            bestattr[iter] = newStuff[it, iter + 1];
                        }
                    }
                } while (knn.Score(attrcr, cl) < scoreH / 100 && ++safety < 1000);

                for (int iter = 0; iter < attribs; iter++)
                {
                    newStuff[it, iter + 1] = bestattr[iter];
                }
            }//koniec całego generowania

            //tu dać zapis do pliku
            string savefiledir = "";

            using (var dirB = new System.Windows.Forms.SaveFileDialog())
            {
                dirB.Filter     = "Text Files | *.txt";
                dirB.DefaultExt = "txt";
                var res = dirB.ShowDialog();
                if (res == System.Windows.Forms.DialogResult.OK)
                {
                    using (var write = new System.IO.StreamWriter(savefiledir = dirB.FileName))
                    {
                        for (int x = 0; x < newStuff.GetLength(0); x++)
                        {
                            string line = "";
                            for (int y = 0; y < newStuff.GetLength(1); y++)
                            {
                                line += newStuff[x, y] + ',';
                            }
                            line = line.Remove(line.Length - 1);
                            string[] temp = line.Split(',');
                            generating.Add(line.Split(','));
                            swap(ref temp[0], ref temp[clsCol]);
                            line = "";
                            for (int y = 0; y < temp.Length; y++)
                            {
                                line += temp[y] + ',';
                            }
                            line = line.Remove(line.Length - 1);
                            write.WriteLine(line);
                        }
                    }
                }
                else
                {
                    return;
                }
            }
            //tu dać walidację wygenerowanych danych

            var dialogResult = System.Windows.MessageBox.Show("Do you want to test the generated data?", "Data testing - extended data", System.Windows.MessageBoxButton.YesNo);

            if (dialogResult == MessageBoxResult.Yes)
            {
                var genClass = new int[generating.Count];
                //var genAttr = new int[generating.Count, generating.ElementAt(0).Length - 1].ToJagged();
                var genAttr_d = new double[generating.Count, generating.ElementAt(0).Length - 1].ToJagged();


                for (int x = 0; x < generating.Count; x++)
                {
                    for (int y = 0; y < generating.ElementAt(0).Length; y++)
                    {
                        double rr = 0;
                        string ss = generating.ElementAt(x)[y];
                        if (!double.TryParse(ss, System.Globalization.NumberStyles.AllowDecimalPoint,
                                             System.Globalization.NumberFormatInfo.InvariantInfo, out rr) || y == 0)
                        {
                            if (!stringIntCheatSheet[y].ContainsKey(ss))
                            {
                                stringIntCheatSheet[y].Add(ss, stringIntCheatSheet[y].Count);
                            }
                            rr = stringIntCheatSheet[y][ss];
                        }
                        if (y == 0)
                        {
                            genClass[x] = (int)rr;
                        }
                        else
                        {
                            genAttr_d[x][y - 1] = rr;
                        }
                    }
                }

                //przerobienie na tablicę intów, z przesunięciem dobli o precyzję
                var genAttr_i  = new int[generating.Count, generating.ElementAt(0).Length - 1].ToJagged();
                var readAttr_i = new int[reading.Count, reading.ElementAt(0).Length - 1].ToJagged();

                int shift = (int)Math.Pow(10, FltPrecBox.SelectedIndex + 1);
                for (int x = 0; x < generating.Count; x++)
                {
                    for (int y = 0; y < generating.ElementAt(0).Length - 1; y++)
                    {
                        if (attrType[y].Equals("double"))
                        {
                            genAttr_i[x][y] = (int)(genAttr_d[x][y] * shift);
                        }
                        else
                        {
                            genAttr_i[x][y] = (int)genAttr_d[x][y];
                        }
                    }
                }
                for (int x = 0; x < reading.Count; x++)
                {
                    for (int y = 0; y < reading.ElementAt(0).Length - 1; y++)
                    {
                        if (attrType[y].Equals("double"))
                        {
                            readAttr_i[x][y] = (int)(readAttr_d[x][y] * shift);
                        }
                        else
                        {
                            readAttr_i[x][y] = (int)readAttr_d[x][y];
                        }
                    }
                }


                int correctnb = 0, incorrectnb = 0, correctknn = 0, incorrectknn = 0, correctsvm = 0, incorrectsvm = 0;

                var        learn = new NaiveBayesLearning();
                NaiveBayes nb    = learn.Learn(readAttr_i, readClass);
                var        test  = nb.Decide(genAttr_i);
                foreach (var v in test)
                {
                    if (v.Equals(genClass[test.IndexOf(v)]))
                    {
                        correctnb++;
                    }
                    else
                    {
                        incorrectnb++;
                    }
                }

                /////////////////////////////////////////////////////////////////////////

                var testknn = knn.Decide(genAttr_d);
                for (int i = 0; i < testknn.Length; i++)
                //foreach (var v in testknn)
                {
                    if (testknn[i].Equals(genClass[i]))
                    {
                        correctknn++;
                    }
                    else
                    {
                        incorrectknn++;
                    }
                }
                /////////////////////////////////////////////////////////////////////////

                try
                {
                    var teach = new MulticlassSupportVectorLearning <Gaussian>()
                    {
                        // Configure the learning algorithm to use SMO to train the
                        //  underlying SVMs in each of the binary class subproblems.
                        Learner = (param) => new SequentialMinimalOptimization <Gaussian>()
                        {
                            // Estimate a suitable guess for the Gaussian kernel's parameters.
                            // This estimate can serve as a starting point for a grid search.
                            UseKernelEstimation = true
                        }
                    };
                    var svm = teach.Learn(readAttr_d, readClass);

                    var testsvm = svm.Decide(genAttr_d);
                    for (int i = 0; i < testsvm.Length; i++)
                    //foreach (var v in testknn)
                    {
                        if (testsvm[i].Equals(genClass[i]))
                        {
                            correctsvm++;
                        }
                        else
                        {
                            incorrectsvm++;
                        }
                    }
                }
                catch (AggregateException) { }
                ////////////////////////////////////////////////////////////

                double[][] mixAttr_d = new double[genAttr_d.GetLength(0) + readAttr_d.GetLength(0),
                                                  genAttr_d[0].Length].ToJagged();
                int[] mixClass = new int[genClass.Length + readClass.Length];

                Array.Copy(readClass, mixClass, readClass.Length);
                Array.Copy(genClass, 0, mixClass, readClass.Length, genClass.Length);

                Array.Copy(readAttr_d, mixAttr_d, readAttr_d.Length);
                Array.Copy(genAttr_d, 0, mixAttr_d, readAttr_d.Length, genAttr_d.Length);

                int[][] mixAttr_i = new int[genAttr_i.GetLength(0) + readAttr_i.GetLength(0),
                                            genAttr_i[0].Length].ToJagged();

                Array.Copy(readAttr_i, mixAttr_i, readAttr_i.Length);
                Array.Copy(genAttr_i, 0, mixAttr_i, readAttr_i.Length, genAttr_i.Length);

                //KROSWALIDACJAAAAAAAAAAAAAAAAAA
                genClassesSqrt = (int)Math.Round(Math.Sqrt(genClass.Length));
                mixClassesSqrt = (int)Math.Round(Math.Sqrt(mixClass.Length));

                //KNN

                var crossvalidationRead = CrossValidation.Create(
                    k: 4,
                    learner: (p) => new KNearestNeighbors(k: readClassesSqrt),
                    loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                    x: readAttr_d, y: readClass
                    );
                var resultRead = crossvalidationRead.Learn(readAttr_d, readClass);
                // We can grab some information about the problem:
                var numberOfSamplesRead = resultRead.NumberOfSamples;
                var numberOfInputsRead  = resultRead.NumberOfInputs;
                var numberOfOutputsRead = resultRead.NumberOfOutputs;

                var trainingErrorRead   = resultRead.Training.Mean;
                var validationErrorRead = resultRead.Validation.Mean;

                var    readCM       = resultRead.ToConfusionMatrix(readAttr_d, readClass);
                double readAccuracy = readCM.Accuracy;
                //////////////////////////////////////////////////////////
                var crossvalidationGen = CrossValidation.Create(
                    k: 4,
                    learner: (p) => new KNearestNeighbors(k: genClassesSqrt),
                    loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                    x: genAttr_d, y: genClass
                    );
                var resultGen = crossvalidationGen.Learn(genAttr_d, genClass);
                // We can grab some information about the problem:
                var numberOfSamplesGen = resultGen.NumberOfSamples;
                var numberOfInputsGen  = resultGen.NumberOfInputs;
                var numberOfOutputsGen = resultGen.NumberOfOutputs;

                var    trainingErrorGen   = resultGen.Training.Mean;
                var    validationErrorGen = resultGen.Validation.Mean;
                var    genCM       = resultGen.ToConfusionMatrix(genAttr_d, genClass);
                double genAccuracy = genCM.Accuracy;
                //////////////////////////////////////////////////////////

                var crossvalidationMix = CrossValidation.Create(
                    k: 4,
                    learner: (p) => new KNearestNeighbors(k: mixClassesSqrt),
                    loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                    x: mixAttr_d, y: mixClass
                    );
                var resultMix = crossvalidationMix.Learn(readAttr_d, readClass);
                // We can grab some information about the problem:
                var numberOfSamplesMix = resultMix.NumberOfSamples;
                var numberOfInputsMix  = resultMix.NumberOfInputs;
                var numberOfOutputsMix = resultMix.NumberOfOutputs;

                var trainingErrorMix   = resultMix.Training.Mean;
                var validationErrorMix = resultMix.Validation.Mean;

                var    mixCM       = resultMix.ToConfusionMatrix(mixAttr_d, mixClass);
                double mixAccuracy = mixCM.Accuracy;

                //NB
                var crossvalidationReadnb = CrossValidation.Create(
                    k: 4,
                    learner: (p) => new NaiveBayesLearning(),
                    loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                    x: readAttr_i, y: readClass
                    );
                var resultReadnb = crossvalidationReadnb.Learn(readAttr_i, readClass);
                // We can grab some information about the problem:
                var numberOfSamplesReadnb = resultReadnb.NumberOfSamples;
                var numberOfInputsReadnb  = resultReadnb.NumberOfInputs;
                var numberOfOutputsReadnb = resultReadnb.NumberOfOutputs;

                var trainingErrorReadnb   = resultReadnb.Training.Mean;
                var validationErrorReadnb = resultReadnb.Validation.Mean;

                var    readCMnb       = resultReadnb.ToConfusionMatrix(readAttr_i, readClass);
                double readAccuracynb = readCMnb.Accuracy;
                //////////////////////////////////////////////////////////
                var crossvalidationGennb = CrossValidation.Create(
                    k: 4,
                    learner: (p) => new NaiveBayesLearning(),
                    loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                    x: genAttr_i, y: genClass
                    );
                var resultGennb = crossvalidationGennb.Learn(genAttr_i, genClass);
                // We can grab some information about the problem:
                var numberOfSamplesGennb = resultGennb.NumberOfSamples;
                var numberOfInputsGennb  = resultGennb.NumberOfInputs;
                var numberOfOutputsGennb = resultGennb.NumberOfOutputs;

                var    trainingErrorGennb   = resultGennb.Training.Mean;
                var    validationErrorGennb = resultGennb.Validation.Mean;
                var    genCMnb       = resultGennb.ToConfusionMatrix(genAttr_i, genClass);
                double genAccuracynb = genCMnb.Accuracy;
                //////////////////////////////////////////////////////////

                var crossvalidationMixnb = CrossValidation.Create(
                    k: 4,
                    learner: (p) => new NaiveBayesLearning(),
                    loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                    x: mixAttr_i, y: mixClass
                    );
                var resultMixnb = crossvalidationMixnb.Learn(mixAttr_i, mixClass);
                // We can grab some information about the problem:
                var numberOfSamplesMixnb = resultMixnb.NumberOfSamples;
                var numberOfInputsMixnb  = resultMixnb.NumberOfInputs;
                var numberOfOutputsMixnb = resultMixnb.NumberOfOutputs;

                var trainingErrorMixnb   = resultMixnb.Training.Mean;
                var validationErrorMixnb = resultMixnb.Validation.Mean;

                var    mixCMnb       = resultMixnb.ToConfusionMatrix(mixAttr_i, mixClass);
                double mixAccuracynb = mixCMnb.Accuracy;

                //SVM
                double readAccuracysvm = 0, genAccuracysvm = 0, mixAccuracysvm = 0;
                try
                {
                    var crossvalidationReadsvm = CrossValidation.Create(
                        k: 4,
                        learner: (p) => new MulticlassSupportVectorLearning <Gaussian>()
                    {
                        Learner = (param) => new SequentialMinimalOptimization <Gaussian>()
                        {
                            UseKernelEstimation = true
                        }
                    },
                        loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                        x: readAttr_d, y: readClass
                        );
                    //crossvalidationReadsvm.ParallelOptions.MaxDegreeOfParallelism = 1;
                    var resultReadsvm = crossvalidationReadsvm.Learn(readAttr_d, readClass);
                    // We can grab some information about the problem:
                    var numberOfSamplesReadsvm = resultReadsvm.NumberOfSamples;
                    var numberOfInputsReadsvm  = resultReadsvm.NumberOfInputs;
                    var numberOfOutputsReadsvm = resultReadsvm.NumberOfOutputs;

                    var trainingErrorReadsvm   = resultReadsvm.Training.Mean;
                    var validationErrorReadsvm = resultReadsvm.Validation.Mean;

                    var readCMsvm = resultReadsvm.ToConfusionMatrix(readAttr_d, readClass);
                    readAccuracysvm = readCMsvm.Accuracy;
                }
                catch (AggregateException) { }
                //////////////////////////////////////////////////////////
                try
                {
                    var crossvalidationGensvm = CrossValidation.Create(
                        k: 4,
                        learner: (p) => new MulticlassSupportVectorLearning <Gaussian>()
                    {
                        Learner = (param) => new SequentialMinimalOptimization <Gaussian>()
                        {
                            UseKernelEstimation = true
                        }
                    },
                        loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                        x: genAttr_d, y: genClass
                        );
                    var resultGensvm = crossvalidationGensvm.Learn(genAttr_d, genClass);
                    // We can grab some information about the problem:
                    var numberOfSamplesGensvm = resultGensvm.NumberOfSamples;
                    var numberOfInputsGensvm  = resultGensvm.NumberOfInputs;
                    var numberOfOutputsGensvm = resultGensvm.NumberOfOutputs;

                    var trainingErrorGensvm   = resultGensvm.Training.Mean;
                    var validationErrorGensvm = resultGensvm.Validation.Mean;
                    var genCMsvm = resultGensvm.ToConfusionMatrix(genAttr_d, genClass);
                    genAccuracysvm = genCMsvm.Accuracy;
                }
                catch (AggregateException) { }
                //////////////////////////////////////////////////////////
                try
                {
                    var crossvalidationMixsvm = CrossValidation.Create(
                        k: 4,
                        learner: (p) => new MulticlassSupportVectorLearning <Gaussian>()
                    {
                        Learner = (param) => new SequentialMinimalOptimization <Gaussian>()
                        {
                            UseKernelEstimation = true
                        }
                    },
                        loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                        x: mixAttr_d, y: mixClass
                        );
                    var resultMixsvm = crossvalidationMixsvm.Learn(mixAttr_d, mixClass);
                    // We can grab some information about the problem:
                    var numberOfSamplesMixsvm = resultMixsvm.NumberOfSamples;
                    var numberOfInputsMixsvm  = resultMixsvm.NumberOfInputs;
                    var numberOfOutputsMixsvm = resultMixsvm.NumberOfOutputs;

                    var trainingErrorMixsvm   = resultMixsvm.Training.Mean;
                    var validationErrorMixsvm = resultMixsvm.Validation.Mean;

                    var mixCMsvm = resultMixsvm.ToConfusionMatrix(mixAttr_d, mixClass);
                    mixAccuracysvm = mixCMsvm.Accuracy;
                }
                catch (AggregateException) { }
                /////////////////////////////////////////////////
                if (correctsvm == 0 && incorrectsvm == 0)
                {
                    incorrectsvm = 1;
                }
                double knnRatio = 100.0 * correctknn / (correctknn + incorrectknn),
                       nbRatio  = 100.0 * correctnb / (correctnb + incorrectnb),
                       svmRatio = 100.0 * correctsvm / (correctsvm + incorrectsvm);
                System.Windows.MessageBox.Show(
                    "K Nearest Neighbours Classification:\nGenerated Data Correct Ratio: " +
                    knnRatio.ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "%\n" +
                    "Original Data X-Validation Accuracy: "
                    + (100.0 * readAccuracy).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n" + "Generated Data X-Validation Accuracy: "
                    + (100.0 * genAccuracy).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n" + "Mixed Data X-Validation Accuracy: "
                    + (100.0 * mixAccuracy).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n"
                    + "\n\n" + "Naive Bayes Classification:\nGenerated Data Correct Ratio: " +
                    nbRatio.ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "%\n" +
                    "Original Data X-Validation Accuracy: "
                    + (100.0 * readAccuracynb).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n" + "Generated Data X-Validation Accuracy: "
                    + (100.0 * genAccuracynb).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n" + "Mixed Data X-Validation Accuracy: "
                    + (100.0 * mixAccuracynb).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n" +
                    "\n\n" + "Support Vector Machine Classification:\nGenerated Data Correct Ratio: " +
                    svmRatio.ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "%\n" +
                    "Original Data X-Validation Accuracy: "
                    + (100.0 * readAccuracysvm).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n" + "Generated Data X-Validation Accuracy: "
                    + (100.0 * genAccuracysvm).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n" + "Mixed Data X-Validation Accuracy: "
                    + (100.0 * mixAccuracysvm).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture)
                    + "%\n",
                    "Data Testing - extending dataset",
                    System.Windows.MessageBoxButton.OK);

                /*
                 * ///TEMP - do eksportowania danych do arkusza
                 *
                 *  using (var write = new System.IO.StreamWriter("TestDataDump.txt")){
                 *      write.WriteLine("ScoreTreshold," + scoreH.ToString());
                 *      write.WriteLine("NewDataAmt," + newData.ToString());
                 *      write.WriteLine("Generated Data Correct Ratio," +
                 *          knnRatio.ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "," +
                 *          nbRatio.ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) +"," +
                 *          svmRatio.ToString("0.00", System.Globalization.CultureInfo.InvariantCulture));
                 *      write.WriteLine("Original Data X-Validation Accuracy," +
                 *          (100.0 * readAccuracy).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "," +
                 *          (100.0 * readAccuracynb).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "," +
                 *          (100.0 * readAccuracysvm).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture));
                 *      write.WriteLine("Generated Data X-Validation Accuracy," +
                 *          (100.0 * genAccuracy).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "," +
                 *          (100.0 * genAccuracynb).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "," +
                 *          (100.0 * genAccuracysvm).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture));
                 *  write.WriteLine("Mixed Data X-Validation Accuracy," +
                 *          (100.0 * mixAccuracy).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "," +
                 *          (100.0 * mixAccuracynb).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture) + "," +
                 *          (100.0 * mixAccuracysvm).ToString("0.00", System.Globalization.CultureInfo.InvariantCulture));
                 *
                 * }
                 *  System.Diagnostics.Process.Start("TestDataDump.txt");
                 */
            }
            dialogResult = System.Windows.MessageBox.Show("Do you want to open the file with generated data?", "Data testing - extended data", System.Windows.MessageBoxButton.YesNo);
            if (dialogResult == MessageBoxResult.Yes)
            {
                System.Diagnostics.Process.Start(savefiledir);
            }
        }
        /// <summary>
        /// Processes camera 1's video frame-by-frame: extracts per-frame features,
        /// runs the trained classifiers (Random Forest, Logistic Regression, Naive
        /// Bayes, HMM) and raises a visual/audio warning once enough "abnormal"
        /// votes accumulate. Runs until ~10 frames before the end of the video.
        /// </summary>
        private void Video1_Proccess1()
        {
            int  war_at_frame = 0;    // frame index at which the last warning was raised
            bool warning      = false;

            while (camera_1.frameNum < total_frames1 - 10)
            {
                // Reset the rolling abnormal-frame counter every 20 frames.
                if (camera_1.frameNum % 20 == 0)
                {
                    count = 0;
                }

                abnormal_vote = 0;
                normal_vote   = 0;
                try
                {
                    double[] fe = F_E.extract(vid1, camera_1.frameNum);

                    // Fallback position when feature extraction yields no result.
                    // BUG FIX: the original compared non-nullable doubles to null
                    // (always false, CS0472) and assigned fe[0] twice; detect NaN
                    // instead and populate both slots. (Assumes the extractor
                    // signals failure via NaN — TODO confirm against F_E.extract.)
                    if (double.IsNaN(fe[0]) || double.IsNaN(fe[1]))
                    {
                        fe[0] = 240;
                        fe[1] = 170;
                    }
                    int[] fff = new int[] { (int)fe[0], (int)fe[1] };

                    int    RF_answer = RF.Decide(fe);
                    bool   LR_answer = LR.Decide(fe);
                    int    NB_answer = NB.Decide(fff);
                    double fl1       = HMM.LogLikelihood(fff);

                    if (chocking || lying)
                    {
                        Console.WriteLine(fl1);
                        // HMM log-likelihood above the empirical threshold counts
                        // toward a choking/lying detection.
                        if (fl1 > -8.3)
                        {
                            hmm_count++;
                        }
                    }
                    else if (violence)
                    {
                        // Weighted majority vote; each weight is the classifier's
                        // measured accuracy on the validation set.
                        if (RF_answer == 1)
                        {
                            abnormal_vote += 0.978546619845336;
                        }
                        else
                        {
                            normal_vote += 0.978546619845336;
                        }

                        if (LR_answer)
                        {
                            abnormal_vote += 0.8428031393318365;
                        }
                        else
                        {
                            normal_vote += 0.8428031393318365;
                        }

                        if (NB_answer == 1)
                        {
                            abnormal_vote += 0.8746569953754341;
                        }
                        else
                        {
                            normal_vote += 0.8746569953754341;
                        }

                        if (abnormal_vote > normal_vote)
                        {
                            count++;
                        }
                    }

                    // Raise a warning after 2 HMM hits or 4 abnormal-vote frames.
                    if (hmm_count >= 2 || count >= 4)
                    {
                        if (count >= 4)
                        {
                            count = 0;
                        }
                        if (hmm_count >= 2)
                        {
                            hmm_count = 0;
                        }

                        this.pictureBox3.Invoke((MethodInvoker) delegate
                        {
                            // Running on the UI thread
                            pictureBox3.Image = Properties.Resources.warning;
                        });

                        if (alarm)
                        {
                            wplayer.URL = "D:\\2\\Real-Time Abnormal Event Detection And Tracking In Video\\Alarm.mp3";
                            wplayer.controls.play();
                        }

                        warning      = true;
                        war_at_frame = camera_1.frameNum;

                        // Save a short clip + thumbnail of the event and list it in the grid.
                        Media.Crop_video(vid1, (int)camera_1.frameNum / (fbs + 5), 30);
                        Media.thumbnail(vid1, (int)camera_1.frameNum / (fbs + 5));
                        Image image = Image.FromFile(@"D:\2\Real-Time Abnormal Event Detection And Tracking In Video\croped_videos\crop" + Media.num.ToString() + ".jpg");
                        dataGridView1.Rows.Add(image, @"D:\2\Real-Time Abnormal Event Detection And Tracking In Video\croped_videos\crop" + Media.num.ToString() + ".mpg");
                        Media.num++;
                    }

                    // Clear the warning icon 10 frames after it was raised.
                    if (warning && camera_1.frameNum >= (war_at_frame + 10))
                    {
                        this.pictureBox3.Invoke((MethodInvoker) delegate
                        {
                            // Running on the UI thread
                            pictureBox3.Image = Properties.Resources._checked;
                        });
                        warning = false;
                    }
                }
                catch (Exception e)
                {
                    // BUG FIX: the original passed e.Message as an unused format
                    // argument ("1--- " has no {0}), so the message was never shown.
                    Console.WriteLine("1--- " + e.Message);
                }
            }
        }
Example #22
0
 /// <summary>
 /// Classifies a feature vector with the trained Naive Bayes model.
 /// Band intensities are scaled from the raw ushort range into [0, 1]
 /// before being handed to the classifier.
 /// </summary>
 /// <param name="feature">Feature vector whose band intensities are classified.</param>
 /// <returns>The predicted class label.</returns>
 public override int Predict(FeatureVector feature)
 {
     // Normalize each raw intensity to the unit interval.
     double[] normalized = Array.ConvertAll(
         feature.BandIntensities,
         intensity => (double)intensity / ushort.MaxValue);

     return _bayes.Decide(normalized);
 }
Example #23
0
        public void laplace_smoothing_missing_sample()
        {
            #region doc_laplace
            // Demonstrates the effect of the Laplace rule when a symbol the
            // model should support never occurs in the training data: the
            // second column could take the values 0, 1 or 2, yet the samples
            // below only ever contain 1 and 2.

            int[][] samples =
            {
                //      input     output
                new [] { 0, 1 }, //  0
                new [] { 0, 2 }, //  0
                new [] { 0, 1 }, //  0
                new [] { 1, 2 }, //  1
                new [] { 0, 2 }, //  1
                new [] { 0, 2 }, //  1
                new [] { 1, 1 }, //  2
                new [] { 0, 1 }, //  2
                new [] { 1, 1 }, //  2
            };

            // The class label of each sample above:
            int[] labels =
            {
                0, 0, 0, 1, 1, 1, 2, 2, 2,
            };

            // The data alone cannot reveal every symbol the model must be
            // able to handle, so the model is specified by hand: the first
            // column can assume 2 distinct values, the second can assume 3.
            var model = new NaiveBayes(classes: 3, symbols: new[] { 2, 3 });

            // Build a learning algorithm bound to the hand-specified model
            var teacher = new NaiveBayesLearning()
            {
                Model = model
            };

            // Turn on Laplace (additive) smoothing
            teacher.Options.InnerOption.UseLaplaceRule = true;

            // Fit the Naive Bayes model to the samples
            teacher.Learn(samples, labels);

            // Classify the instance (0, 1)
            int predicted = model.Decide(new int[] { 0, 1 });
            #endregion

            Assert.AreEqual(0, predicted);

            double probability = model.Probability(new int[] { 0, 1 }, out predicted);
            Assert.AreEqual(0, predicted);
            Assert.AreEqual(0.52173913043478259, probability, 1e-10);

            double misclassificationRate = new ZeroOneLoss(labels)
            {
                Mean = true
            }.Loss(model.Decide(samples));

            Assert.AreEqual(2 / 9.0, misclassificationRate);
        }
Exemple #24
0
        public void ComputeTest()
        {
            #region doc_mitchell
            DataTable table = new DataTable("Mitchell's Tennis Example");

            table.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            table.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            table.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            table.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            table.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
            table.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
            table.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
            table.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
            table.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
            table.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
            table.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
            table.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
            table.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
            table.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            table.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");
            #endregion

            #region doc_codebook
            // Build a codification codebook that maps each categorical
            // string value onto a discrete integer symbol
            Codification codebook = new Codification(table,
                                                     "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            // Encode the table and split it into training inputs and outputs
            DataTable encoded = codebook.Apply(table);
            int[][]   inputs  = encoded.ToArray <int>("Outlook", "Temperature", "Humidity", "Wind");
            int[]     outputs = encoded.ToArray <int>("PlayTennis");
            #endregion

            #region doc_learn
            // Instantiate the Naive Bayes learning algorithm
            var teacher = new NaiveBayesLearning();

            // Fit a Naive Bayes model to the encoded examples
            NaiveBayes classifier = teacher.Learn(inputs, outputs);
            #endregion


            #region doc_test
            // Should one play tennis on a sunny, cool, humid and windy day?
            // First, encode that scenario into its symbol representation
            int[] query = codebook.Translate("Sunny", "Cool", "High", "Strong");

            // Ask the model for the numeric class of the encoded instance
            int predicted = classifier.Decide(query); // answer will be 0

            // Map the numeric class back onto its "Yes"/"No" string label
            string answer = codebook.Translate("PlayTennis", predicted); // answer will be "No"

            // The per-class probabilities can be retrieved as well
            double[] probabilities = classifier.Probabilities(query); // { 0.795, 0.205 }
            #endregion

            Assert.AreEqual("No", answer);
            Assert.AreEqual(0, predicted);
            Assert.AreEqual(0.795, probabilities[0], 1e-3);
            Assert.AreEqual(0.205, probabilities[1], 1e-3);
            Assert.AreEqual(1, probabilities.Sum(), 1e-10);
            Assert.IsFalse(double.IsNaN(probabilities[0]));
            Assert.AreEqual(2, probabilities.Length);
        }
Exemple #25
0
        /// <summary>
        /// Picks a movie to recommend to the current visitor. Anonymous users
        /// and users with no review history get a uniformly random movie;
        /// users whose reviews all share one genre get a random movie from
        /// that genre; otherwise a Naive Bayes model trained on the reviewed
        /// genres selects the genre to draw from.
        /// </summary>
        /// <returns>
        /// The recommended movie, or an empty <see cref="Movie"/> when the
        /// catalog contains no movies at all.
        /// </returns>
        public Movie RecommendMovie()
        {
            Random random = new Random();

            // Empty catalog: nothing to recommend.
            if (db.Movies.Count() == 0)
            {
                return new Movie();
            }

            // Anonymous visitor: fall back to a uniformly random movie.
            if (Session["User"] == null)
            {
                return db.Movies.OrderBy(r => Guid.NewGuid()).First();
            }

            int userId = ((User)Session["User"]).Id;

            // Genre id of every movie this user has reviewed (repetitions kept,
            // so more-reviewed genres occur more often).
            int[] reviewedGenres = db.Reviews
                .Where(review => review.UserID == userId)
                .Select(review => review.Movie.GenereID)
                .ToArray();

            // No review history yet: random movie again.
            if (reviewedGenres.Length == 0)
            {
                return db.Movies.OrderBy(r => Guid.NewGuid()).First();
            }

            List <Movie> candidates;

            if (reviewedGenres.Distinct().Count() == 1)
            {
                // Every review is in the same genre: use it directly.
                int onlyGenreId = reviewedGenres[0];

                candidates = db.Generes.First(x => x.Id == onlyGenreId).Movies;
            }
            else
            {
                // Map the raw genre ids onto contiguous class labels (and back)
                // so they can be used as classifier outputs.
                var genreToLabel = new Dictionary <int, int>();
                var labelToGenre = new Dictionary <int, int>();

                int[][] trainingInputs = new int[reviewedGenres.Length][];
                int     nextLabel      = 0;

                for (int i = 0; i < reviewedGenres.Length; i++)
                {
                    // NOTE(review): each training vector is a single feature
                    // that is never assigned (always 0), so the classifier can
                    // only learn the class priors of the reviewed genres.
                    trainingInputs[i] = new int[1];

                    if (!genreToLabel.ContainsKey(reviewedGenres[i]))
                    {
                        genreToLabel[reviewedGenres[i]] = nextLabel;
                        labelToGenre[nextLabel]         = reviewedGenres[i];

                        nextLabel++;
                    }
                }

                int[] trainingLabels = new int[reviewedGenres.Length];

                for (int i = 0; i < reviewedGenres.Length; i++)
                {
                    trainingLabels[i] = genreToLabel[reviewedGenres[i]];
                }

                var        teacher = new NaiveBayesLearning();
                NaiveBayes model   = teacher.Learn(trainingInputs, trainingLabels);

                // Decide on the same all-zero feature vector used in training.
                int predictedLabel   = model.Decide(new int[] { default(int) });
                int predictedGenreId = labelToGenre[predictedLabel];

                candidates = db.Generes.First(x => x.Id == predictedGenreId).Movies;
            }

            return candidates[random.Next(candidates.Count)];
        }