Example #1
        public void ExecuteTest()
        {
            string[][] words =
            {
                new string[] { "今日", "は", "いい", "天気", "です"   },
                new string[] { "明日", "も", "いい", "天気", "でしょう" }
            };

            var codebook = new BagOfWords()
            {
                //MaximumOccurance = 1 // the resulting vector will have only 0's and 1's
                MaximumOccurance = int.MaxValue
            };

            // Compute the codebook (note: this would have to be done only for the training set)
            codebook.Learn(words);

            // Now, we can use the learned codebook to extract fixed-length
            // representations of the different texts (paragraphs) above:

            // Extract a feature vector from the text 1:
            double[] bow1 = codebook.Transform(words[0]);

            // Extract a feature vector from the text 2:
            double[] bow2 = codebook.Transform(words[1]);

            // we could also have transformed everything at once, i.e.
            double[][] bow = codebook.Transform(words);


            // Now, since we have finite length representations (both bow1 and bow2 should
            // have the same size), we can pass them to any classifier or machine learning
            // method. For example, we can pass them to a Logistic Regression Classifier to
            // discern between the first and second paragraphs

            // Let's create a logistic classifier to separate the two paragraphs:
            var learner = new IterativeReweightedLeastSquares <LogisticRegression>()
            {
                Tolerance      = 1e-4, // Let's set some convergence parameters
                Iterations     = 100,  // maximum number of iterations to perform
                Regularization = 0
            };

            // Now, we use the learning algorithm to learn the distinction between the two:
            LogisticRegression reg = learner.Learn(new[] { bow1, bow2 }, new[] { false, true });

            // Finally, we can predict using the classifier:
            bool c1 = reg.Decide(bow1); // Should be false
            bool c2 = reg.Decide(bow2); // Should be true

            Console.WriteLine(c1);
            Console.WriteLine(c2);
        }
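A quick note on what the calls above produce: assuming the codebook assigns word codes in order of first appearance, the combined vocabulary of the two five-token sentences has 8 distinct entries, so bow1 and bow2 are both length-8 count vectors (with MaximumOccurance = int.MaxValue they hold raw counts, which here are all 0 or 1 since no token repeats within a sentence):

            Console.WriteLine(bow1.Length);             // 8
            Console.WriteLine(string.Join(", ", bow1)); // 1, 1, 1, 1, 1, 0, 0, 0   (今日, は, いい, 天気, です, 明日, も, でしょう)
            Console.WriteLine(string.Join(", ", bow2)); // 0, 0, 1, 1, 0, 1, 1, 1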
Example #2
        public static int[] IterativeLeastSquares(double[][] input1, int[] output1, string fName)
        {
            double[] labels  = System.Array.ConvertAll <int, double>(output1, x => x);
            var      learner = new IterativeReweightedLeastSquares <LogisticRegression>()
            {
                // Gets or sets the tolerance value used to determine whether the algorithm has converged.
                Tolerance     = 1e-4, // Let's set some convergence parameters
                MaxIterations = 10,
                //MaxIterations = 100,  // maximum number of iterations to perform
                Regularization = 0
            };

            // Now, we can use the learner to finally estimate our model:
            LogisticRegression regression = learner.Learn(input1, output1);

            double [] coefficients = learner.Solution;

            double[] scores = regression.Probability(input1);

            regression.Save(fName.Replace(".csv", ".IRLS.save"), compression: SerializerCompression.None);

            // Finally, if we would like to arrive at a conclusion regarding
            // each sample, we can use the Decide method, which will transform
            // the probabilities (from 0 to 1) into actual true/false values:

            return(Funcs.Utility.BoolToInt(regression.Decide(input1)));

            // (MATLAB-style accuracy check: mean(double(p == y)) * 100)
        }
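The trailing MATLAB-style comment hints at an accuracy check; a C# equivalent, shown here as a sketch that would go before the return statement (assuming output1 holds 0/1 labels), could be:

            bool[] predictions = regression.Decide(input1);     // true/false decision per sample
            int correct = 0;
            for (int i = 0; i < predictions.Length; i++)
                if ((predictions[i] ? 1 : 0) == output1[i])     // count matches against the labels
                    correct++;
            double accuracyPercent = 100.0 * correct / predictions.Length;
            Console.WriteLine("Training accuracy: {0:F1}%", accuracyPercent);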
    // Compute car control based on sensor readings
    void ComputeControl(float sensorL, float sensorF, float sensorR, float carVelocity, float forward)
    {
        // Inputs
        double[][] inputsT = new double[1][];
        inputsT[0]    = new double[4];
        inputsT[0][0] = sensorL;
        inputsT[0][1] = sensorF;
        inputsT[0][2] = sensorR;
        inputsT[0][3] = carVelocity;

        double[][] inputs = new double[1][];
        inputs[0]    = new double[5];
        inputs[0][0] = sensorL;
        inputs[0][1] = sensorF;
        inputs[0][2] = sensorR;
        inputs[0][3] = carVelocity;
        inputs[0][4] = forward;

        answerThrust = treeThrust.Decide(inputsT);
        answerSteer  = treeSteer.Decide(inputs);

        // Thrust

        if (answerThrust[0])
        {
            thrust = pod[1];
            //Debug.Log("Accelerate!");
        }
        else
        {
            thrust = -pod[2];
            //Debug.Log("Brake!");
        }

        // Steer

        if (answerSteer[0])
        {
            if (sensorL > sensorR)
            {
                steer = pod[3];
                //Debug.Log("Turn left!");
            }
            else
            {
                steer = -pod[3];
                //Debug.Log("Turn right!");
            }
        }
        else
        {
            steer = 0;
            //Debug.Log("Go straight!");
        }

        // Command
        rb.AddRelativeForce(new Vector2(0f, thrust));
        rb.AddTorque(steer);
    }
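Since only one sample is classified per call here, the single-row Decide overload would be equivalent, assuming treeThrust and treeSteer follow the usual Accord pattern of exposing both a per-sample and a per-batch Decide (those fields are not shown above, so this is an assumption):

    // Hypothetical single-sample variant of the two Decide calls above:
    //     bool thrustOn = treeThrust.Decide(inputsT[0]);
    //     bool steerOn  = treeSteer.Decide(inputs[0]);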
Example #4
        public bool classifierLR(data d)
        {
            int D = d.d;

            double[] input = new double[D];
            bool     output;

            for (int i = 0; i < D; ++i)
            {
                input[i] = d.msg[i];
            }

            output = regression.Decide(input);

            return(output);
        }
Example #5
        private static void logisticRegression(double[][] inputs, int[] outputs)
        {
            // Create iterative re-weighted least squares for logistic regressions
            var teacher = new IterativeReweightedLeastSquares <LogisticRegression>()
            {
                MaxIterations  = 100,
                Regularization = 1e-6
            };

            // Use the teacher algorithm to learn the regression:
            LogisticRegression lr = teacher.Learn(inputs, outputs);

            // Classify the samples using the model
            bool[] answers = lr.Decide(inputs);

            // Convert to Int32 so we can plot:
            int[] zeroOneAnswers = answers.ToZeroOne();

            // Plot the results
            ScatterplotBox.Show("Expected results", inputs, outputs);
            ScatterplotBox.Show("Logistic Regression results", inputs, zeroOneAnswers)
            .Hold();
        }
Example #6
        private static void ShowAccordIntermediateData(IDataView input, LogisticRegression accordModel)
        {
            var convertedInput = IDataViewToAccord(input).inputs;

            bool[] lrOutput        = accordModel.Decide(convertedInput);
            var    lrScore         = accordModel.Scores(convertedInput);
            var    lrProbability   = accordModel.Probabilities(convertedInput);
            var    inputEnumerable = mlContext.Data.CreateEnumerable <IDataViewWrapper>(input, true);

            sw.WriteLine("Label\tScore\tProbability\tPrediction");
            int count = 0;

            foreach (IDataViewWrapper pp in inputEnumerable)
            {
                int    label       = pp.Label ? 1 : 0;
                int    prediction  = lrOutput[count] ? 1 : 0;
                double score       = lrScore[count][1];
                double probability = lrProbability[count][1];
                sw.WriteLine("{0}\t{1}\t{2}\t{3}", label, score, probability, prediction);
                count++;
            }
            sw.Flush();
        }
        public void ComputeTest()
        {
            // Suppose we have the following data about some patients.
            // The first variable is continuous and represents patient
            // age. The second variable is dichotomous and indicates whether
            // they smoke or not (this is completely fictional data).
            double[][] input =
            {
                new double[] { 55, 0 }, // 0 - no cancer
                new double[] { 28, 0 }, // 0
                new double[] { 65, 1 }, // 0
                new double[] { 46, 0 }, // 1 - have cancer
                new double[] { 86, 1 }, // 1
                new double[] { 56, 1 }, // 1
                new double[] { 85, 0 }, // 0
                new double[] { 33, 0 }, // 0
                new double[] { 21, 1 }, // 0
                new double[] { 42, 1 }, // 1
            };

            // We also know if they have had lung cancer or not, and
            // we would like to know whether smoking has any connection
            // with lung cancer (This is completely fictional data).
            double[] output =
            {
                0, 0, 0, 1, 1, 1, 0, 0, 0, 1
            };



            // To verify this hypothesis, we are going to create a logistic
            // regression model for those two inputs (age and smoking).
            LogisticRegression regression = new LogisticRegression(inputs: 2);

            // Next, we are going to estimate this model. For this, we
            // will use the Iteratively Reweighted Least Squares method.
            var teacher = new IterativeReweightedLeastSquares(regression);

            teacher.Regularization = 0;

            // Now, we will iteratively estimate our model. The Run method returns
            // the maximum relative change in the model parameters and we will use
            // it as the convergence criteria.

            double delta = 0;

            do
            {
                // Perform an iteration
                delta = teacher.Run(input, output);
            } while (delta > 0.001);

            // At this point, we can compute the odds ratio of our variables.
            // In the model, the variable at 0 is always the intercept term,
            // with the other following in the sequence. Index 1 is the age
            // and index 2 is whether the patient smokes or not.

            // For the age variable, we have that individuals of
            //   higher age have 1.021 times the odds of getting lung
            //   cancer, controlling for cigarette smoking.
            double ageOdds = regression.GetOddsRatio(1); // 1.0208597028836701

            // For the smoking/non-smoking category variable, however, we
            //   have that individuals who smoke have 5.858 times the odds
            //   of developing lung cancer compared to those who do not
            //   smoke, controlling for age (remember, this is completely
            //   fictional and for demonstration purposes only).
            double smokeOdds = regression.GetOddsRatio(2); // 5.8584748789881331

            double[] actual = new double[output.Length];
            for (int i = 0; i < input.Length; i++)
            {
                actual[i] = regression.Compute(input[i]);
            }

            double[] expected =
            {
                0.21044171560168326,
                0.13242527535212373,
                0.65747803433771812,
                0.18122484822324372,
                0.74755661773156912,
                0.61450041841477232,
                0.33116705418194975,
                0.14474110902457912,
                0.43627109657399382,
                0.54419383282533118
            };

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            Assert.AreEqual(1.0208597028836701, ageOdds, 1e-10);
            Assert.AreEqual(5.8584748789881331, smokeOdds, 1e-8);

            Assert.AreEqual(-2.4577464307294092, regression.Intercept, 1e-8);
            Assert.AreEqual(-2.4577464307294092, regression.Coefficients[0], 1e-8);
            Assert.AreEqual(0.020645118265359252, regression.Coefficients[1], 1e-10);
            Assert.AreEqual(1.7678893101571855, regression.Coefficients[2], 1e-8);

            bool[] actualOutput = regression.Decide(input);
            Assert.IsFalse(actualOutput[0]);
            Assert.IsFalse(actualOutput[1]);
            Assert.IsTrue(actualOutput[2]);
            Assert.IsFalse(actualOutput[3]);
            Assert.IsTrue(actualOutput[4]);
            Assert.IsTrue(actualOutput[5]);
            Assert.IsFalse(actualOutput[6]);
            Assert.IsFalse(actualOutput[7]);
            Assert.IsFalse(actualOutput[8]);
            Assert.IsTrue(actualOutput[9]);
        }
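The probabilities returned by regression.Compute above are the standard logistic function of the linear predictor built from the fitted coefficients. A small helper that reproduces them (a sketch written against the Intercept/Coefficients values asserted in this test, not a method from the Accord API) would look like:

        static double LogisticProbability(LogisticRegression model, double[] x)
        {
            // Coefficients[0] is the intercept (equal to model.Intercept, as asserted above);
            // the remaining coefficients weight the inputs in order.
            double z = model.Coefficients[0];
            for (int i = 0; i < x.Length; i++)
                z += model.Coefficients[i + 1] * x[i];

            // The logistic (sigmoid) link maps the linear predictor to a probability in (0, 1)
            return 1.0 / (1.0 + Math.Exp(-z));
        }

For the first patient, LogisticProbability(regression, new double[] { 55, 0 }) evaluates to roughly 0.2104, matching expected[0]; Decide is then effectively a 0.5 threshold on this probability.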
        public void learn_new_mechanism()
        {
            #region doc_log_reg_1
            // Suppose we have the following data about some patients.
            // The first variable is continuous and represents patient
            // age. The second variable is dichotomous and indicates whether
            // they smoke or not (this is completely fictional data).

            // We also know if they have had lung cancer or not, and
            // we would like to know whether smoking has any connection
            // with lung cancer (This is completely fictional data).

            double[][] input =
            {                           // age, smokes?, had cancer?
                new double[] { 55, 0 }, // false - no cancer
                new double[] { 28, 0 }, // false
                new double[] { 65, 1 }, // false
                new double[] { 46, 0 }, // true  - had cancer
                new double[] { 86, 1 }, // true
                new double[] { 56, 1 }, // true
                new double[] { 85, 0 }, // false
                new double[] { 33, 0 }, // false
                new double[] { 21, 1 }, // false
                new double[] { 42, 1 }, // true
            };

            bool[] output = // Whether each patient had lung cancer or not
            {
                false, false, false, true, true, true, false, false, false, true
            };


            // To verify this hypothesis, we are going to create a logistic
            // regression model for those two inputs (age and smoking), learned
            // using a method called "Iteratively Reweighted Least Squares":

            var learner = new IterativeReweightedLeastSquares <LogisticRegression>()
            {
                Tolerance      = 1e-4, // Let's set some convergence parameters
                Iterations     = 100,  // maximum number of iterations to perform
                Regularization = 0
            };

            // Now, we can use the learner to finally estimate our model:
            LogisticRegression regression = learner.Learn(input, output);

            // At this point, we can compute the odds ratio of our variables.
            // In the model, the variable at 0 is always the intercept term,
            // with the other following in the sequence. Index 1 is the age
            // and index 2 is whether the patient smokes or not.

            // For the age variable, we have that individuals of
            //   higher age have 1.021 times the odds of getting lung
            //   cancer, controlling for cigarette smoking.
            double ageOdds = regression.GetOddsRatio(1); // 1.0208597028836701

            // For the smoking/non-smoking category variable, however, we
            //   have that individuals who smoke have 5.858 times the odds
            //   of developing lung cancer compared to those who do not
            //   smoke, controlling for age (remember, this is completely
            //   fictional and for demonstration purposes only).
            double smokeOdds = regression.GetOddsRatio(2); // 5.8584748789881331

            // If we would like to use the model to predict a probability for
            // each patient regarding whether they are at risk of cancer or not,
            // we can use the Probability function:

            double[] scores = regression.Probability(input);

            // Finally, if we would like to arrive at a conclusion regarding
            // each patient, we can use the Decide method, which will transform
            // the probabilities (from 0 to 1) into actual true/false values:

            bool[] actual = regression.Decide(input);
            #endregion

            double[] expected =
            {
                0.21044171560168326,
                0.13242527535212373,
                0.65747803433771812,
                0.18122484822324372,
                0.74755661773156912,
                0.61450041841477232,
                0.33116705418194975,
                0.14474110902457912,
                0.43627109657399382,
                0.54419383282533118
            };

            for (int i = 0; i < scores.Length; i++)
            {
                Assert.AreEqual(expected[i], scores[i], 1e-8);
            }

            double[] transform = regression.Transform(input, scores);
            for (int i = 0; i < scores.Length; i++)
            {
                Assert.AreEqual(expected[i], transform[i], 1e-8);
            }

            Assert.AreEqual(1.0208597028836701, ageOdds, 1e-10);
            Assert.AreEqual(5.8584748789881331, smokeOdds, 1e-6);

            Assert.AreEqual(-2.4577464307294092, regression.Intercept, 1e-8);
            Assert.AreEqual(-2.4577464307294092, regression.Coefficients[0], 1e-8);
            Assert.AreEqual(0.020645118265359252, regression.Coefficients[1], 1e-10);
            Assert.AreEqual(1.7678893101571855, regression.Coefficients[2], 1e-8);

            Assert.IsFalse(actual[0]);
            Assert.IsFalse(actual[1]);
            Assert.IsTrue(actual[2]);
            Assert.IsFalse(actual[3]);
            Assert.IsTrue(actual[4]);
            Assert.IsTrue(actual[5]);
            Assert.IsFalse(actual[6]);
            Assert.IsFalse(actual[7]);
            Assert.IsFalse(actual[8]);
            Assert.IsTrue(actual[9]);
        }
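As a consistency check on the values asserted above, the odds ratios are just the exponentiated coefficients; the following assertions (not part of the original test) would also pass with the numbers shown:

            Assert.AreEqual(ageOdds, Math.Exp(regression.Coefficients[1]), 1e-10);   // exp(0.02064...) ≈ 1.02086
            Assert.AreEqual(smokeOdds, Math.Exp(regression.Coefficients[2]), 1e-8);  // exp(1.76789...) ≈ 5.85847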
Example #9
        public void learn_test()
        {
            #region doc_learn
            // The Bag-Of-Words model can be used to extract finite-length feature
            // vectors from sequences of arbitrary length, like for example, texts:


            string[] texts =
            {
                @"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas molestie malesuada 
                  nisi et placerat. Curabitur blandit porttitor suscipit. Nunc facilisis ultrices felis,
                  vitae luctus arcu semper in. Fusce ut felis ipsum. Sed faucibus tortor ut felis placerat
                  euismod. Vestibulum pharetra velit et dolor ornare quis malesuada leo aliquam. Aenean 
                  lobortis, tortor iaculis vestibulum dictum, tellus nisi vestibulum libero, ultricies 
                  pretium nisi ante in neque. Integer et massa lectus. Aenean ut sem quam. Mauris at nisl 
                  augue, volutpat tempus nisl. Suspendisse luctus convallis metus, vitae pretium risus 
                  pretium vitae. Duis tristique euismod aliquam",

                @"Sed consectetur nisl et diam mattis varius. Aliquam ornare tincidunt arcu eget adipiscing. 
                  Etiam quis augue lectus, vel sollicitudin lorem. Fusce lacinia, leo non porttitor adipiscing, 
                  mauris purus lobortis ipsum, id scelerisque erat neque eget nunc. Suspendisse potenti. Etiam 
                  non urna non libero pulvinar consequat ac vitae turpis. Nam urna eros, laoreet id sagittis eu,
                  posuere in sapien. Phasellus semper convallis faucibus. Nulla fermentum faucibus tellus in 
                  rutrum. Maecenas quis risus augue, eu gravida massa."
            };

            string[][] words = texts.Tokenize();

            // Create a new BoW with options:
            var codebook = new BagOfWords()
            {
                MaximumOccurance = 1 // the resulting vector will have only 0's and 1's
            };

            // Compute the codebook (note: this would have to be done only for the training set)
            codebook.Learn(words);


            // Now, we can use the learned codebook to extract fixed-length
            // representations of the different texts (paragraphs) above:

            // Extract a feature vector from the text 1:
            double[] bow1 = codebook.Transform(words[0]);

            // Extract a feature vector from the text 2:
            double[] bow2 = codebook.Transform(words[1]);

            // we could also have transformed everything at once, i.e.
            // double[][] bow = codebook.Transform(words);


            // Now, since we have finite length representations (both bow1 and bow2 should
            // have the same size), we can pass them to any classifier or machine learning
            // method. For example, we can pass them to a Logistic Regression Classifier to
            // discern between the first and second paragraphs

            // Let's create a logistic classifier to separate the two paragraphs:
            var learner = new IterativeReweightedLeastSquares <LogisticRegression>()
            {
                Tolerance      = 1e-4, // Let's set some convergence parameters
                Iterations     = 100,  // maximum number of iterations to perform
                Regularization = 0
            };

            // Now, we use the learning algorithm to learn the distinction between the two:
            LogisticRegression reg = learner.Learn(new[] { bow1, bow2 }, new[] { false, true });

            // Finally, we can predict using the classifier:
            bool c1 = reg.Decide(bow1); // Should be false
            bool c2 = reg.Decide(bow2); // Should be true
            #endregion

            Assert.AreEqual(bow1.Length, 99);
            Assert.AreEqual(bow2.Length, 99);

            Assert.AreEqual(bow1.Sum(), 67);
            Assert.AreEqual(bow2.Sum(), 63);

            Assert.IsFalse(c1);
            Assert.IsTrue(c2);
        }
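The learned codebook can also vectorize text that was not part of training; tokens outside the learned vocabulary are simply ignored, so the vector length stays at 99. A hypothetical follow-up (the sentence below is made up for illustration):

            string[][] newWords = new[] { "Lorem ipsum dolor sit amet" }.Tokenize();
            double[] bowNew = codebook.Transform(newWords[0]); // still length 99, with 1's only at known tokens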
Example #10
        static void Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("Requires a previously trained and saved Model File");
                Console.WriteLine("Usage <testfile> <label file> <Model File>");
                System.Environment.Exit(-1);
            }

            Console.WriteLine("Logisitic Regression Prediction\n");
            string testFname   = args[0];
            string labelsFname = args[1];
            string ModelFname  = args[2];


            double[,] Rawdata;
            double[,] labeldata;
            // Read in the test data; validate file existence by attempting to open the files first
            try
            {
                // Open for reading (these files are only read below)
                FileStream fs = File.Open(testFname, FileMode.Open, FileAccess.Read, FileShare.None);
                fs.Close();
                // Reuse fs for validating the labels file
                fs = File.Open(labelsFname, FileMode.Open, FileAccess.Read, FileShare.None);
                fs.Close();

                fs = File.Open(ModelFname, FileMode.Open, FileAccess.Read, FileShare.None);
                fs.Close();
            }
            catch (Exception e)
            {
                Console.WriteLine("Error opening file{0}", e);
                System.Environment.Exit(-1);
            }
            using (CsvReader reader = new CsvReader(testFname, hasHeaders: false))
            {
                Rawdata = reader.ToMatrix();
            }
            using (CsvReader reader = new CsvReader(labelsFname, hasHeaders: false))
            {
                labeldata = reader.ToMatrix();
            }

            // Convert Raw data to Jagged array
            double[][] testdata = Rawdata.ToJagged();
            int[]      output1  = funcs.convetToJaggedArray(labeldata);

            int [] answers = new int[labeldata.GetLength(0)];

            // For Accord.net Logistic Regression the input data needs to be in Jagged Arrays
            // Labels can either be int (1,0) or bools
            if (ModelFname.IndexOf("bfgs", StringComparison.OrdinalIgnoreCase) >= 0)
            {
                // Load a BFGS regression model
                try
                {
                    MultinomialLogisticRegression mlr = Serializer.Load <MultinomialLogisticRegression>(ModelFname);
                    answers = mlr.Decide(testdata);
                } catch (Exception e)
                {
                    Console.WriteLine("Error opening model file: {0}", ModelFname);
                    Console.WriteLine("Exception {0}", e);
                    System.Environment.Exit(-1);
                }
            }


            else if (ModelFname.IndexOf("pcd", StringComparison.OrdinalIgnoreCase) >= 0)
            {
                LogisticRegression regression = new LogisticRegression();
                try
                {
                    regression = Serializer.Load <LogisticRegression>(ModelFname);
                    answers    = funcs.BoolToInt(regression.Decide(testdata));
                }
                catch (Exception e)
                {
                    Console.WriteLine("Error opening model file: {0}", ModelFname);
                    Console.WriteLine("Exception {0}", e);
                    System.Environment.Exit(-1);
                }
            }

            Console.WriteLine("Successfully loaded model file => {0}", ModelFname);

            double subtotal = 0;
            int    index    = 0;

            foreach (var result in answers)
            {
                if (result == output1[index])
                {
                    subtotal = subtotal + 1;
                }
                index++;
            }
            double accuracy = subtotal / answers.Count();

            Console.WriteLine("Predicted accuracy using model:{0} is, {1}", ModelFname, Math.Round(accuracy * 100, 2));
        }
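The model files consumed above are expected to have been written with the matching save calls shown elsewhere on this page; for example (the file name here is hypothetical, chosen so it hits the "pcd" branch):

            // Training side, as in Example #2:
            //     regression.Save("train.pcd.save", compression: SerializerCompression.None);
            //
            // Prediction side, as above:
            //     var loaded = Serializer.Load<LogisticRegression>("train.pcd.save");
            //     int[] predictions = funcs.BoolToInt(loaded.Decide(testdata));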
        private void Video1_Proccess1()
        {
            //if (_capture1 != null && _capture1.Ptr != IntPtr.Zero)
            //{

            int  war_at_frame = 0;
            bool warning      = false;

            while (camera_1.frameNum < total_frames1 - 10)
            {
                //Console.WriteLine(camera_1.frameNum);
                if (camera_1.frameNum % 20 == 0)
                {
                    count = 0;
                }

                abnormal_vote = 0;
                normal_vote   = 0;
                try
                {
                    double[] fe = F_E.extract(vid1, camera_1.frameNum);
                    // Fall back to default coordinates when extraction yields no usable values
                    // (the original compared the doubles against null, which is always false)
                    if (double.IsNaN(fe[0]) || double.IsNaN(fe[1]))
                    {
                        fe[0] = 240;
                        fe[1] = 170;
                    }
                    int[] fff = new int[] { (int)fe[0], (int)fe[1] };

                    //int knn_answer = knn.Decide(fe);
                    int  RF_answer = RF.Decide(fe);
                    bool LR_answer = LR.Decide(fe);
                    //bool SVM_answer = SVM.Decide(fe);
                    int    NB_answer = NB.Decide(fff);
                    double fl1       = HMM.LogLikelihood(fff);

                    if (chocking || lying)
                    {
                        Console.WriteLine(fl1);
                        if (fl1 > -8.3)
                        {
                            hmm_count++;
                        }
                    }


                    else if (violence)
                    {
                        if (RF_answer == 1)
                        {
                            abnormal_vote += 0.978546619845336;
                        }
                        else
                        {
                            normal_vote += 0.978546619845336;
                        }

                        if (LR_answer)
                        {
                            abnormal_vote += 0.8428031393318365;
                        }
                        else
                        {
                            normal_vote += 0.8428031393318365;
                        }

                        if (NB_answer == 1)
                        {
                            abnormal_vote += 0.8746569953754341;
                        }
                        else
                        {
                            normal_vote += 0.8746569953754341;
                        }

                        if (abnormal_vote > normal_vote)
                        {
                            count++;
                        }
                    }

                    if (hmm_count >= 2 || count >= 4)
                    {
                        if (count >= 4)
                        {
                            count = 0;
                        }
                        if (hmm_count >= 2)
                        {
                            hmm_count = 0;
                        }

                        this.pictureBox3.Invoke((MethodInvoker) delegate
                        {
                            // Running on the UI thread
                            pictureBox3.Image = Properties.Resources.warning;
                        });

                        if (alarm)
                        {
                            wplayer.URL = "D:\\2\\Real-Time Abnormal Event Detection And Tracking In Video\\Alarm.mp3";
                            wplayer.controls.play();
                        }



                        //pictureBox3.Image = Properties.Resources.warning;
                        warning      = true;
                        war_at_frame = camera_1.frameNum;

                        Media.Crop_video(vid1, (int)camera_1.frameNum / (fbs + 5), 30);
                        Media.thumbnail(vid1, (int)camera_1.frameNum / (fbs + 5));
                        Image image = Image.FromFile(@"D:\2\Real-Time Abnormal Event Detection And Tracking In Video\croped_videos\crop" + Media.num.ToString() + ".jpg");
                        dataGridView1.Rows.Add(image, @"D:\2\Real-Time Abnormal Event Detection And Tracking In Video\croped_videos\crop" + Media.num.ToString() + ".mpg");
                        Media.num++;
                    }

                    if (warning && camera_1.frameNum >= (war_at_frame + 10))
                    {
                        this.pictureBox3.Invoke((MethodInvoker) delegate
                        {
                            // Running on the UI thread
                            pictureBox3.Image = Properties.Resources._checked;
                        });
                        //pictureBox3.Image = Properties.Resources._checked;
                        warning = false;
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine("1--- ", e.Message);
                }
            }
        }
Example #12
        private void GetResultButton_Click(object sender, EventArgs e)
        {
            Settings.Default["AlgorName"] = label3.Text;
            ResultBox.Items.Clear();
            if (label3.Text == "Statics")
            {
                if (!(PF is Istatics))
                {
                    MessageBox.Show("Production Facade doesn't have this interface yet");
                    return;
                }
                Settings.Default["Parameter"] = textBox1.Text;
                int    numOfParmas = PF.GetData()[0].GetNumOfParams();
                double mean, sd;
                for (int i = 0; i < numOfParmas; i++)
                {
                    mean = PF.GetMean(i);
                    ResultBox.Items.Add("Mean of param " + i + ": " + mean);
                    sd = PF.GetStandardDeviation(i);
                    ResultBox.Items.Add("StandardDeviation of param " + i + ": " + sd);
                    if (textBox1.Text != "")
                    {
                        double number    = Double.Parse(textBox1.Text);
                        double Threshold = PF.GetThreshold(number);
                        ResultBox.Items.Add("Threshold of param " + i + ": " + Threshold);
                        ResultBox.Items.Add("");
                    }
                }
            }
            else if (label3.Text == "Peak")
            {
                if (!(PF is Ipeek))
                {
                    MessageBox.Show("Production Facade doesn't have this interface yet");
                    return;
                }
                if (XaxiscomboBox.Text == "" && YaxiscomboBox.Text == "")
                {
                    return;
                }
                sortData();
                Settings.Default["NumOfPoints"] = numBox.Text;
                Settings.Default["Range"]       = rangeBox.Text;
                if (YaxiscomboBox.Text.Contains("#"))
                {
                    string text        = YaxiscomboBox.Text.Substring(10).Trim();
                    int    num         = Int32.Parse(text);
                    int    numOfpoints = 3;
                    double percentage  = 0.2;
                    if (numBox.Text != "")
                    {
                        numOfpoints = Int32.Parse(numBox.Text);
                    }
                    if (rangeBox.Text != "")
                    {
                        percentage = double.Parse(rangeBox.Text);
                    }
                    dataDict = PF.CalculateTimesEachX(num);
                    List <PeekValleyData> peaks = PF.GetPeaksWithNumOfSamePoints(num, numOfpoints, percentage);
                    peaksData = peaks;

                    /*
                     * for (int i = 0; i < PF.GetData().Length; i++)
                     * {
                     *  ResultBox.Items.Add(PF.GetData()[i].WriteToLine());
                     * }*/
                    if (peaksData.Count() == 0)
                    {
                        ResultBox.Items.Add("No Peak found yet.");
                    }
                    foreach (var item in peaks)
                    {
                        ResultBox.Items.Add("Peak Value: " + item.value + "   Times: " + item.times);
                    }
                }
                else if (YaxiscomboBox.Text != "")
                {
                    string text = YaxiscomboBox.Text.Substring(5).Trim();

                    int Yindex = Int32.Parse(text);
                    // MessageBox.Show(Yindex.ToString());
                    int    numOfpoints = 3;
                    double percentage  = 0.2;
                    if (numBox.Text != "")
                    {
                        numOfpoints = Int32.Parse(numBox.Text);
                    }
                    if (rangeBox.Text != "")
                    {
                        percentage = double.Parse(rangeBox.Text);
                    }
                    text = XaxiscomboBox.Text.Substring(5).Trim();
                    int Xindex;
                    if (XaxiscomboBox.Text.Contains("TimeSpan"))
                    {
                        Xindex = -1;
                    }
                    else
                    {
                        Xindex = Int32.Parse(text);
                    }
                    MyData[] MD = PF.GetData();
                    dataPoints = new points[MD.Count()];
                    for (int i = 0; i < MD.Count(); i++)
                    {
                        dataPoints[i].x = MD[i].GetParameters()[Xindex];
                        dataPoints[i].y = MD[i].GetParameters()[Yindex];
                    }
                    List <PeekValleyData> peaks = PF.GetPeaksWithXY(Yindex, Xindex, numOfpoints, percentage);
                    peaksData = peaks;

                    /*
                     * for (int i = 0; i < PF.GetData().Length; i++)
                     * {
                     *  ResultBox.Items.Add(PF.GetData()[i].WriteToLine());
                     * }
                     * */
                    foreach (var item in peaks)
                    {
                        ResultBox.Items.Add("X: " + item.x + "   Y: " + item.y);
                    }
                }
            }
            else if (label3.Text == "Polynomial Fit")
            {
                if (!(PF is Ipolyfit))
                {
                    MessageBox.Show("Production Facade doesn't have this interface yet");
                    return;
                }
                if (XaxiscomboBox.Text == "" && YaxiscomboBox.Text == "")
                {
                    return;
                }
                sortData();
                int power = 1;
                if (powerBox.Text != "")
                {
                    power = Int32.Parse(powerBox.Text);
                }
                if (YaxiscomboBox.Text.Contains("#"))
                {
                    string text = YaxiscomboBox.Text.Substring(10).Trim();
                    int    num  = Int32.Parse(text);
                    Dictionary <double, int> dict = PF.CalculateTimesEachX(num);
                    dataDict    = dict;
                    double[,] x = new double[dict.Keys.Count(), power];
                    double[] keys = dict.Keys.ToArray();
                    for (int i = 0; i < dict.Keys.Count(); i++)
                    {
                        for (int j = 0; j < power; j++)
                        {
                            x[i, j] = Math.Pow(keys[i], j + 1);
                        }
                    }
                    double[] y = new double[keys.Length];
                    for (int i = 0; i < keys.Length; i++)
                    {
                        y[i] = dict[keys[i]];
                    }
                    double[,] result = PF.GetPolyFit(x, y, power);
                    paramsResult     = result;
                    string r = "y=";
                    r = r + result[0, 0].ToString("#.000");
                    for (int i = 1; i <= power; i++)
                    {
                        r = r + "+ " + result[i, 0].ToString("#.000") + "x^" + i.ToString();
                    }
                    ResultBox.Items.Add(r);
                }
                else if (YaxiscomboBox.Text != "")
                {
                    string text   = YaxiscomboBox.Text.Substring(5).Trim();
                    int    Yindex = Int32.Parse(text);
                    int    Xindex;
                    if (XaxiscomboBox.Text.Contains("TimeSpan"))
                    {
                        MessageBox.Show("We have not support Timespan in this algorithm yet.");
                        return;
                    }
                    else
                    {
                        text   = XaxiscomboBox.Text.Substring(5).Trim();
                        Xindex = Int32.Parse(text);
                    }
                    MyData[] MD = PF.GetData();
                    dataPoints  = new points[MD.Count()];
                    double[,] x = new double[MD.Count(), power];
                    for (int i = 0; i < MD.Count(); i++)
                    {
                        dataPoints[i].x = MD[i].GetParameters()[Xindex];
                        for (int j = 0; j < power; j++)
                        {
                            x[i, j] = Math.Pow(MD[i].GetParameters()[Xindex], j + 1);
                        }
                    }
                    double[] y = new double[MD.Count()];
                    for (int i = 0; i < y.Length; i++)
                    {
                        y[i]            = MD[i].GetParameters()[Yindex];
                        dataPoints[i].y = y[i];
                    }
                    double[,] result = PF.GetPolyFit(x, y, power);
                    paramsResult     = result;
                    string r = "y=";
                    r = r + result[0, 0].ToString("#.000");
                    for (int i = 1; i <= power; i++)
                    {
                        r = r + "+ " + result[i, 0].ToString("#.000") + "x^" + i.ToString();
                    }
                    ResultBox.Items.Add(r);
                }
                if (xvalueBox.Text != "")
                {
                    double x = Convert.ToDouble(xvalueBox.Text);
                    double y = 0;
                    for (int j = 0; j < paramsResult.GetLength(0); j++)
                    {
                        y = y + Math.Pow(x, j) * paramsResult[j, 0];
                    }
                    ResultBox.Items.Add("X-value: " + x.ToString("#.000") + " Y-value: " + y.ToString("#.000"));
                    Settings.Default["Xvalue"] = xvalueBox.Text;
                }
                Settings.Default["Power"] = powerBox.Text;
            }
            else if (label3.Text == "Normal Distribution Fit")
            {
                if (!(PF is INormalDistributionFit))
                {
                    MessageBox.Show("Production Facade doesn't have this interface yet");
                    return;
                }
                if (XaxiscomboBox.Text == "" && YaxiscomboBox.Text == "")
                {
                    return;
                }
                sortData();

                /*
                 * for (int i = 0; i < PF.GetData().Length; i++)
                 * {
                 *  ResultBox.Items.Add(PF.GetData()[i].WriteToLine());
                 * }*/
                if (YaxiscomboBox.Text.Contains("#"))
                {
                    string text = YaxiscomboBox.Text.Substring(10).Trim();
                    int    num  = Int32.Parse(text);
                    dataDict = PF.CalculateTimesEachX(num);
                    NormalDistributionInfo ndi = PF.GetNormalDistributionFitWithNumOfSamePoints(num);
                    NDinfo = ndi;
                    ResultBox.Items.Add("Mean: " + ndi.mean.ToString() + " SD: " + ndi.SD.ToString());
                }
                else
                {
                    if (XaxiscomboBox.Text == "" || YaxiscomboBox.Text == "")
                    {
                        return;
                    }
                    string text   = YaxiscomboBox.Text.Substring(5).Trim();
                    int    Yindex = Int32.Parse(text);
                    int    Xindex;
                    if (XaxiscomboBox.Text.Contains("TimeSpan"))
                    {
                        MessageBox.Show("We have not support Timespan in this algorithm yet.");
                        return;
                    }
                    else
                    {
                        text   = XaxiscomboBox.Text.Substring(5).Trim();
                        Xindex = Int32.Parse(text);
                    }
                    MyData[] MD = PF.GetData();
                    dataPoints = new points[MD.Count()];
                    for (int i = 0; i < MD.Count(); i++)
                    {
                        dataPoints[i].x = MD[i].GetParameters()[Xindex];
                        dataPoints[i].y = MD[i].GetParameters()[Yindex];
                    }
                    int num = Int32.Parse(text);
                    dataDict = PF.CalculateTimesEachX(num);
                    NormalDistributionInfo ndi = PF.GetNormalDistributionFitWithNumOfSamePoints(num);
                    NDinfo = ndi;
                    ResultBox.Items.Add("Mean: " + ndi.mean.ToString() + " SD: " + ndi.SD.ToString());
                }
                if (xvalueBox.Text != "")
                {
                    double x = Convert.ToDouble(xvalueBox.Text);
                    double y = 0;
                    y = (1 / (NDinfo.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((x - NDinfo.mean), 2) / (2 * Math.Pow(NDinfo.SD, 2)));
                    int numberOfdata = 0;
                    foreach (var item in dataDict)
                    {
                        numberOfdata = numberOfdata + item.Value;
                    }
                    y = y * numberOfdata;
                    ResultBox.Items.Add("X-value: " + x.ToString("#.000") + " Y-value: " + y.ToString("#.000"));
                    Settings.Default["Xvalue"] = xvalueBox.Text;
                }
            }
            else if (label3.Text == "Logistic Regression")
            {
                if (!(PF is ILogisticRegression))
                {
                    MessageBox.Show("Production Facade doesn't have this interface yet");
                    return;
                }
                if (comboBox2.Text != "")
                {
                    string text  = comboBox2.Text.Substring(5).Trim();
                    int    index = Int32.Parse(text);
                    dataDict = PF.CalculateTimesEachX(index);
                    double percentage = 0.1;
                    PF.SortData(index);
                    if (rangeBox.Text != "")
                    {
                        percentage = double.Parse(rangeBox.Text);
                    }
                    LogisticRegression LR = PF.GetLogisticRegressionParams(index, percentage);
                    LogisticInfo       LI = new LogisticInfo();
                    LI.LogisticParams    = new double[2];
                    LI.LogisticParams[0] = LR.Intercept;
                    LI.LogisticParams[1] = LR.GetOddsRatio(1) - 1;
                    ResultBox.Items.Add("Param0: " + LI.LogisticParams[0] + " Parma1: " + LI.LogisticParams[1]);
                    if (xvalueBox.Text != "")
                    {
                        double   x        = Convert.ToDouble(xvalueBox.Text);
                        double[] valueArr = new double[] { x, 0 };
                        ResultBox.Items.Add("Value: " + x + " Probability: " + LR.Probability(valueArr) + " Conclusion:" + LR.Decide(valueArr));
                    }
                    MyData[] MD        = PF.GetData();
                    double   threshold = -1;

                    /*
                     * for (int i = 0; i < MD.Length; i++)
                     * {
                     *  double value = MD[i].GetParameters()[index];
                     *  double[] valueArr = new double[] { value, 0 };
                     *  ResultBox.Items.Add("Value: " + value + " Probability: " + LR.Probability(valueArr) + " Conclusion:" + LR.Decide(valueArr));
                     * }*/
                    Dictionary <double, int> lowerDict  = new Dictionary <double, int>();
                    Dictionary <double, int> higherDict = new Dictionary <double, int>();
                    for (int i = 0; i < MD.Length; i++)
                    {
                        double   value    = MD[i].GetParameters()[index];
                        double[] valueArr = new double[] { value, 0 };
                        if (threshold == -1 && LR.Decide(valueArr) == true)
                        {
                            threshold = value;
                        }
                        if (threshold == -1)
                        {
                            if (lowerDict.ContainsKey(value))
                            {
                                lowerDict[value]++;
                            }
                            else
                            {
                                lowerDict.Add(value, 1);
                            }
                        }
                        else
                        {
                            if (higherDict.ContainsKey(value))
                            {
                                higherDict[value]++;
                            }
                            else
                            {
                                higherDict.Add(value, 1);
                            }
                        }
                    }
                    ResultBox.Items.Add("Threshold: " + threshold);
                }
            }
            else if (label3.Text == "Two Peaks")
            {
                if (!(PF is Ipeek))
                {
                    MessageBox.Show("Production Facade doesn't have this interface yet");
                    return;
                }
                if (peak1Box.Text != "" && peak2Box.Text != "")
                {
                    string text        = YaxiscomboBox.Text.Substring(10).Trim();
                    int    index       = Int32.Parse(text);
                    double peak1       = Convert.ToDouble(peak1Box.Text);
                    double peak2       = Convert.ToDouble(peak2Box.Text);
                    int    numOfpoints = 5;
                    if (numBox.Text != "")
                    {
                        numOfpoints = Int32.Parse(numBox.Text);
                    }
                    NormalDistributionInfo peak1Info = PF.GetPeakNormalDistribution(peak1, index, numOfpoints);
                    ResultBox.Items.Add("Mean: " + peak1Info.mean.ToString("#.000") + " SD: " + peak1Info.SD.ToString("#.000"));

                    NormalDistributionInfo peak2Info = PF.GetPeakNormalDistribution(peak2, index, numOfpoints);
                    ResultBox.Items.Add("Mean: " + peak2Info.mean.ToString("#.000") + " SD: " + peak2Info.SD.ToString("#.000"));

                    if (xvalueBox.Text != "")
                    {
                        double x   = Convert.ToDouble(xvalueBox.Text);
                        double end = 0;
                        double y   = (1 / (peak1Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((x - peak1Info.mean), 2) / (2 * Math.Pow(peak1Info.SD, 2)));
                        if (y < 0.01)
                        {
                            double x1 = x - 1;
                            double y1 = (1 / (peak1Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((x1 - peak1Info.mean), 2) / (2 * Math.Pow(peak1Info.SD, 2)));
                            if (y1 < y)
                            {
                                end = x1;
                            }
                            else
                            {
                                end = x + 1;
                            }
                        }
                        else
                        {
                            double x1 = x - 1;
                            double y1 = (1 / (peak1Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((x1 - peak1Info.mean), 2) / (2 * Math.Pow(peak1Info.SD, 2)));
                            if (y1 < y)
                            {
                                end = x1;
                                while (y1 > 0.01)
                                {
                                    end--;
                                    y1 = (1 / (peak1Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((end - peak1Info.mean), 2) / (2 * Math.Pow(peak1Info.SD, 2)));
                                }
                            }
                            else
                            {
                                end = x + 1;
                                y1  = (1 / (peak1Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((end - peak1Info.mean), 2) / (2 * Math.Pow(peak1Info.SD, 2)));
                                while (y1 > 0.01)
                                {
                                    end++;
                                    y1 = (1 / (peak1Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((end - peak1Info.mean), 2) / (2 * Math.Pow(peak1Info.SD, 2)));
                                }
                            }
                        }
                        Func <double, double> f1 = (a) => (1 / (peak1Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((a - peak1Info.mean), 2) / (2 * Math.Pow(peak1Info.SD, 2)));
                        double result1           = MathNet.Numerics.Integration.NewtonCotesTrapeziumRule.IntegrateTwoPoint(f1, x, end);
                        if (x <= peak1)
                        {
                            result1 = 1 - Math.Abs(result1);
                            if (result1 > 1)
                            {
                                result1 = 1;
                            }
                        }
                        ResultBox.Items.Add("Probablity of keeping bad chips: " + Math.Abs(result1).ToString("#.000"));
                        double end1 = 0;
                        y = (1 / (peak2Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((x - peak2Info.mean), 2) / (2 * Math.Pow(peak2Info.SD, 2)));
                        if (y < 0.01)
                        {
                            double x1 = x - 1;
                            double y1 = (1 / (peak2Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((x1 - peak2Info.mean), 2) / (2 * Math.Pow(peak2Info.SD, 2)));
                            if (y1 < y)
                            {
                                end1 = x1;
                            }
                            else
                            {
                                end1 = x + 1;
                            }
                        }
                        else
                        {
                            double x1 = x - 1;
                            double y1 = (1 / (peak2Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((x1 - peak2Info.mean), 2) / (2 * Math.Pow(peak2Info.SD, 2)));
                            if (y1 < y)
                            {
                                end1 = x1;
                                while (y1 > 0.01)
                                {
                                    end1--;
                                    y1 = (1 / (peak2Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((end1 - peak2Info.mean), 2) / (2 * Math.Pow(peak2Info.SD, 2)));
                                }
                            }
                            else
                            {
                                end1 = x + 1;
                                y1   = (1 / (peak2Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((end1 - peak2Info.mean), 2) / (2 * Math.Pow(peak2Info.SD, 2)));
                                while (y1 > 0.01)
                                {
                                    end1++;
                                    y1 = (1 / (peak2Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((end1 - peak2Info.mean), 2) / (2 * Math.Pow(peak2Info.SD, 2)));
                                }
                            }
                        }
                        Func <double, double> f2 = (a) => (1 / (peak2Info.SD * Math.Sqrt(2 * Math.PI))) * Math.Exp(-Math.Pow((a - peak2Info.mean), 2) / (2 * Math.Pow(peak2Info.SD, 2)));
                        double result2           = MathNet.Numerics.Integration.NewtonCotesTrapeziumRule.IntegrateTwoPoint(f2, x, end1);
                        if (x >= peak2)
                        {
                            result2 = 1 - Math.Abs(result2);
                            if (result2 > 1)
                            {
                                result2 = 1;
                            }
                        }
                        ResultBox.Items.Add("probability of losing good chips: " + Math.Abs(result2).ToString("#.000"));
                    }
                    dataDict = PF.CalculateTimesEachX(index);



                    TDinfo            = new TwoDictInfo();
                    TDinfo.peak1Info  = peak1Info;
                    TDinfo.peak2Info  = peak2Info;
                    TDinfo.peak1value = peak1;
                    TDinfo.peak2value = peak2;
                    TDinfo.peak1times = dataDict[peak1];
                    TDinfo.peak2times = dataDict[peak2];
                    TDinfo.last       = dataDict.Keys.ToList().Last();
                    //peak1Box.Text = "";
                    // peak2Box.Text = "";
                }
                else
                {
                    if (XaxiscomboBox.Text == "" && YaxiscomboBox.Text == "")
                    {
                        return;
                    }
                    sortData();
                    Settings.Default["NumOfPoints"] = numBox.Text;
                    Settings.Default["Range"]       = rangeBox.Text;
                    if (YaxiscomboBox.Text.Contains("#"))
                    {
                        string text        = YaxiscomboBox.Text.Substring(10).Trim();
                        int    num         = Int32.Parse(text);
                        int    numOfpoints = 3;
                        double percentage  = 0.2;
                        if (numBox.Text != "")
                        {
                            numOfpoints = Int32.Parse(numBox.Text);
                        }
                        if (rangeBox.Text != "")
                        {
                            percentage = double.Parse(rangeBox.Text);
                        }
                        dataDict = PF.CalculateTimesEachX(num);
                        List <PeekValleyData> peaks = PF.GetPeaksWithNumOfSamePoints(num, numOfpoints, percentage);
                        peaksData = peaks;

                        /*
                         * for (int i = 0; i < PF.GetData().Length; i++)
                         * {
                         *  ResultBox.Items.Add(PF.GetData()[i].WriteToLine());
                         * }*/
                        if (peaksData.Count() == 0)
                        {
                            ResultBox.Items.Add("No Peak found yet.");
                        }
                        foreach (var item in peaks)
                        {
                            ResultBox.Items.Add("Peak Value: " + item.value + "   Times: " + item.times);
                        }
                    }
                    else if (YaxiscomboBox.Text != "")
                    {
                        string text = YaxiscomboBox.Text.Substring(5).Trim();

                        int Yindex = Int32.Parse(text);
                        // MessageBox.Show(Yindex.ToString());
                        int    numOfpoints = 3;
                        double percentage  = 0.2;
                        if (numBox.Text != "")
                        {
                            numOfpoints = Int32.Parse(numBox.Text);
                        }
                        if (rangeBox.Text != "")
                        {
                            percentage = double.Parse(rangeBox.Text);
                        }
                        text = XaxiscomboBox.Text.Substring(5).Trim();
                        int Xindex;
                        if (XaxiscomboBox.Text.Contains("TimeSpan"))
                        {
                            Xindex = -1;
                        }
                        else
                        {
                            Xindex = Int32.Parse(text);
                        }
                        MyData[] MD = PF.GetData();
                        dataPoints = new points[MD.Count()];
                        for (int i = 0; i < MD.Count(); i++)
                        {
                            dataPoints[i].x = MD[i].GetParameters()[Xindex];
                            dataPoints[i].y = MD[i].GetParameters()[Yindex];
                        }
                        List <PeekValleyData> peaks = PF.GetPeaksWithXY(Yindex, Xindex, numOfpoints, percentage);
                        peaksData = peaks;

                        /*
                         * for (int i = 0; i < PF.GetData().Length; i++)
                         * {
                         *  ResultBox.Items.Add(PF.GetData()[i].WriteToLine());
                         * }
                         * */
                        foreach (var item in peaks)
                        {
                            ResultBox.Items.Add("X: " + item.x + "   Y: " + item.y);
                        }
                    }
                    if (peaksData.Count() < 2)
                    {
                        ResultBox.Items.Add("We need at least two peaks in this algorithm");
                    }
                    else
                    {
                        peak1Box.Show();
                        peak2Box.Show();
                        label13.Show();
                        label12.Show();
                        xvalueBox.Show();
                        label10.Show();
                        peak1Box.Items.Clear();
                        peak2Box.Items.Clear();
                        points[] pts = GetPeakPoints();
                        foreach (var point in pts)
                        {
                            if (!peak1Box.Items.Contains(point.x))
                            {
                                peak1Box.Items.Add(point.x);
                            }
                            if (!peak2Box.Items.Contains(point.x))
                            {
                                peak2Box.Items.Add(point.x);
                            }
                        }
                    }
                }
            }
            Settings.Default.Save();
        }
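The normal-density expression is written out longhand many times in the handler above; a small helper (a refactoring sketch, not part of the original form code) would keep those branches shorter:

        // Standard normal probability density, exactly the expression repeated above
        private static double NormalPdf(double x, double mean, double sd)
        {
            return (1.0 / (sd * Math.Sqrt(2 * Math.PI)))
                   * Math.Exp(-Math.Pow(x - mean, 2) / (2 * Math.Pow(sd, 2)));
        }

        // e.g.  y  = NormalPdf(x, NDinfo.mean, NDinfo.SD) * numberOfdata;
        //       y1 = NormalPdf(end1, peak2Info.mean, peak2Info.SD);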