Example #1
0
        /// <summary>
        /// BackgroundWorker callback that trains the neural network on the
        /// currently loaded data set. Copies the training rows into per-sample
        /// input/output vectors, normalizes each input neuron to [0, 1],
        /// splits the data into train/validation/test sets, runs
        /// back-propagation epochs and reports the error back to the UI.
        /// </summary>
        /// <param name="sender">The <see cref="BackgroundWorker"/> executing this job.</param>
        /// <param name="e">
        /// <see cref="DoWorkEventArgs.Argument"/> carries the <see cref="TrainingDialog"/>;
        /// <see cref="DoWorkEventArgs.Result"/> receives the per-epoch error values
        /// as a <c>double[]</c> (empty array if cancelled before training starts).
        /// </param>
        private void DoWork(object sender, DoWorkEventArgs e)
        {
            BackgroundWorker worker = sender as BackgroundWorker;

            // Default result so a consumer always gets a non-null double[].
            e.Result = new double[0];

            if (worker.CancellationPending)
            {
                e.Cancel = true;
                return;
            }

            TrainingDialog obj = (TrainingDialog)e.Argument;

            // Each training run gets its own chart series number.
            _Series++;

            #region Prepare data to be trained. Involves copying.

            int numTrainingSets = obj._DataSet.Training.Rows.Count;

            ArrayList inputs  = new ArrayList();
            ArrayList outputs = new ArrayList();

            for (int i = 0; i < numTrainingSets; i++)
            {
                // Input data: facial feature point coordinates, one slot per input neuron.
                // NOTE(review): assumes _NumInputNeurons >= 38 — verify it matches the
                // fixed assignments below, otherwise this throws IndexOutOfRangeException.
                double[] inData = new double[_NumInputNeurons];

                inData[0]  = obj._DataSet.Training[i].LipCornerLeftX;
                inData[1]  = obj._DataSet.Training[i].LipCornerLeftY;
                inData[2]  = obj._DataSet.Training[i].LipCornerRightX;
                inData[3]  = obj._DataSet.Training[i].LipCornerRightY;
                inData[4]  = obj._DataSet.Training[i].LipUpLeftX;
                inData[5]  = obj._DataSet.Training[i].LipUpLeftY;
                inData[6]  = obj._DataSet.Training[i].LipUpCenterX;
                inData[7]  = obj._DataSet.Training[i].LipUpCenterY;
                inData[8]  = obj._DataSet.Training[i].LipUpRightX;
                inData[9]  = obj._DataSet.Training[i].LipUpRightY;
                inData[10] = obj._DataSet.Training[i].LipBottomLeftX;
                inData[11] = obj._DataSet.Training[i].LipBottomLeftY;
                inData[12] = obj._DataSet.Training[i].LipBottomCenterX;
                inData[13] = obj._DataSet.Training[i].LipBottomCenterY;
                inData[14] = obj._DataSet.Training[i].LipBottomRightX;
                inData[15] = obj._DataSet.Training[i].LipBottomRightY;
                inData[16] = obj._DataSet.Training[i].LeftEyeCenterX;
                inData[17] = obj._DataSet.Training[i].LeftEyeCenterY;
                inData[18] = obj._DataSet.Training[i].LeftLidBottomX;
                inData[19] = obj._DataSet.Training[i].LeftLidBottomY;
                inData[20] = obj._DataSet.Training[i].LeftLidCornerLeftX;
                inData[21] = obj._DataSet.Training[i].LeftLidCornerLeftY;
                inData[22] = obj._DataSet.Training[i].LeftLidCornerRightX;
                inData[23] = obj._DataSet.Training[i].LeftLidCornerRightY;
                inData[24] = obj._DataSet.Training[i].LeftLidUpX;
                inData[25] = obj._DataSet.Training[i].LeftLidUpY;
                inData[26] = obj._DataSet.Training[i].MouthCenterX;
                inData[27] = obj._DataSet.Training[i].MouthCenterY;
                inData[28] = obj._DataSet.Training[i].RightEyeCenterX;
                inData[29] = obj._DataSet.Training[i].RightEyeCenterY;
                inData[30] = obj._DataSet.Training[i].RightLidBottomX;
                inData[31] = obj._DataSet.Training[i].RightLidBottomY;
                inData[32] = obj._DataSet.Training[i].RightLidCornerLeftX;
                inData[33] = obj._DataSet.Training[i].RightLidCornerLeftY;
                inData[34] = obj._DataSet.Training[i].RightLidCornerRightX;
                inData[35] = obj._DataSet.Training[i].RightLidCornerRightY;
                inData[36] = obj._DataSet.Training[i].RightLidUpX;
                inData[37] = obj._DataSet.Training[i].RightLidUpY;

                inputs.Add(inData);

                // Output data: one-hot-style expression vector. The expression name
                // is looked up via the row's ExpressionOID and matched by substring,
                // so e.g. "disgust" and "disgusted" both set the "disg" slot.
                double[] outData = new double[_NumOutputNeurons];

                int    eid        = obj._DataSet.Training[i].ExpressionOID;
                string expression = obj._DataSet.Expression.FindByExpressionOID(eid).Expression.ToLower();

                outData[0] = expression.Contains("anger") ? 1 : 0;
                outData[1] = expression.Contains("disg") ? 1 : 0;
                outData[2] = expression.Contains("fear") ? 1 : 0;
                outData[3] = expression.Contains("happy") ? 1 : 0;
                outData[4] = expression.Contains("neutr") ? 1 : 0;
                outData[5] = expression.Contains("sad") ? 1 : 0;
                outData[6] = expression.Contains("surp") ? 1 : 0;

                outputs.Add(outData);
            }
            #endregion

            #region Norm datasets per input neuron
            // Min-max normalize each input column to [0, 1]. Columns with zero
            // spread (max == min) are left untouched to avoid division by zero.
            for (int j = 0; j < _NumInputNeurons; j++)
            {
                // Proper sentinels instead of the former magic ±1e8, which would
                // break for features outside that range.
                double min = double.MaxValue;
                double max = double.MinValue;

                for (int i = 0; i < numTrainingSets; i++)
                {
                    double cur = ((double[])inputs[i])[j];

                    if (min > cur)
                    {
                        min = cur;
                    }
                    if (max < cur)
                    {
                        max = cur;
                    }
                }

                for (int i = 0; (max - min) != 0 && i < numTrainingSets; i++)
                {
                    ((double[])inputs[i])[j] = (((double[])inputs[i])[j] - min) / (max - min);
                }
            }
            #endregion

            #region Pick random train-, validate and test datasets

            // Split: 10% test, 20% of the remainder validation, rest training.
            int numTestDataSets       = (int)Math.Floor((double)numTrainingSets * 0.1);
            int numValidationDataSets = (int)Math.Floor((double)(numTrainingSets - numTestDataSets) * 0.2);
            int numTrainDataSets      = numTrainingSets - numTestDataSets - numValidationDataSets;

            Random rand = new Random();

            // Get random training data. BUGFIX: the original called
            // ArrayList.Remove(idx), which removes by *object* — the boxed int
            // never matches a stored double[], so nothing was ever removed and
            // samples were drawn with replacement. RemoveAt(idx) removes by index.
            double[][] trainingInputs  = new double[numTrainDataSets][];
            double[][] trainingOutputs = new double[numTrainDataSets][];
            for (int i = 0; i < trainingInputs.GetLength(0); i++)
            {
                int idx = rand.Next(inputs.Count);
                trainingInputs[i]  = (double[])inputs[idx];
                trainingOutputs[i] = (double[])outputs[idx];
                inputs.RemoveAt(idx);
                outputs.RemoveAt(idx);
            }

            // Get random validation data.
            // NOTE(review): validation samples are drawn (with replacement) from
            // the *training* set, so validation is not independent — confirm this
            // is intentional.
            double[][] validateInputs  = new double[numValidationDataSets][];
            double[][] validateOutputs = new double[numValidationDataSets][];
            for (int i = 0; i < validateInputs.GetLength(0); i++)
            {
                int idx = rand.Next(numTrainDataSets);
                validateInputs[i]  = trainingInputs[idx];
                validateOutputs[i] = trainingOutputs[idx];
            }

            // Get random test data from the samples not taken for training
            // (same RemoveAt bugfix as above).
            double[][] testInputs  = new double[numTestDataSets][];
            double[][] testOutputs = new double[numTestDataSets][];
            for (int i = 0; i < testInputs.GetLength(0); i++)
            {
                int idx = rand.Next(inputs.Count);
                testInputs[i]  = (double[])inputs[idx];
                testOutputs[i] = (double[])outputs[idx];
                inputs.RemoveAt(idx);
                outputs.RemoveAt(idx);
            }

            #endregion

            #region Train the network and plot the error curve

            _Network.SetActivationFunction(new SigmoidFunction(_SigmoidAlpha));

            BackPropagationLearning teacher = new BackPropagationLearning(_Network);
            teacher.LearningRate = obj._LearningRate;
            teacher.Momentum     = obj._Momentum;

            double error         = 1;
            double maxError      = double.MinValue; // tracks plot range, see below
            int    maxIterations = obj._MaxIterations;
            double epsilon       = obj._Epsilon;

            // Prepare the error Chart
            double[,] errorValues = new double[maxIterations, 2];

            ProgressState state = new ProgressState();

            double[] errors = new double[maxIterations];

            // NOTE(review): the loop condition looks inverted — with
            // maxIterations == 0 it only runs while error <= epsilon, which is
            // false initially (error starts at 1). The in-loop break checks below
            // are what actually terminate the loop for maxIterations > 0.
            // Kept as-is to preserve behavior; confirm intent.
            for (int i = 0; i < maxIterations || error <= epsilon; i++)
            {
                if (maxIterations > 0)
                {
                    if (i >= maxIterations)
                    {
                        break;
                    }
                }
                if (epsilon > 0.0)
                {
                    if (error <= epsilon)
                    {
                        break;
                    }
                }
                // Abort if user requested it
                if (worker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }

                // Train one epoch over the whole training set
                error = teacher.RunEpoch(trainingInputs, trainingOutputs);

                // Store error for result
                errors[i]   = error;
                state.Error = error;

                // Track the largest error to size the chart's Y range
                if (maxError < error)
                {
                    maxError = error;
                }

                // Plot values
                errorValues[i, 0] = i;
                errorValues[i, 1] = error;

                // Report progress every 10th epoch and on the last one
                if (i % 10 == 0 || i == (maxIterations - 1))
                {
                    state.Iteration = i + 1;
                    state.Error     = error;

                    worker.ReportProgress((int)((float)(100 * (i + 1)) / (float)maxIterations), state);
                }
            }

            e.Result = errors;

            obj._Chart.RangeX = new DoubleRange(0.0, (double)maxIterations);
            obj._Chart.RangeY = new DoubleRange(0.0, maxError);

            // add new data series to the chart
            obj._Chart.AddDataSeries("Error " + _Series, System.Drawing.Color.DarkGreen, Chart.SeriesType.ConnectedDots, 2);

            // update the chart
            obj._Chart.UpdateDataSeries("Error " + _Series, errorValues);

            #endregion
        }
        /// <summary>
        /// Command callback which starts the training process by showing the
        /// modal <see cref="TrainingDialog"/> for the current data set.
        /// </summary>
        /// <param name="sender">The command source (unused).</param>
        /// <param name="e">Routed-command event data (unused).</param>
        private void TrainingStart_Executed(object sender, ExecutedRoutedEventArgs e)
        {
            // ~90 lines of obsolete, commented-out _KLU-based training code that
            // followed an unconditional return were removed here — retrieve from
            // version control if ever needed.
            TrainingDialog dlg = new TrainingDialog(ref _DataSet);

            dlg.Owner = this;
            dlg.ShowDialog();
        }
        /// <summary>
        /// Command callback which starts the training process by showing the
        /// modal <see cref="TrainingDialog"/> for the current data set.
        /// </summary>
        /// <param name="sender">The command source (unused).</param>
        /// <param name="e">Routed-command event data (unused).</param>
        /// <remarks>
        /// NOTE(review): this is a byte-for-byte duplicate of the
        /// TrainingStart_Executed definition above — two identical method
        /// signatures cannot coexist in one class, so one copy must be deleted.
        /// The obsolete commented-out _KLU code that followed the unconditional
        /// return has been removed (retrieve from version control if needed).
        /// </remarks>
        private void TrainingStart_Executed(object sender, ExecutedRoutedEventArgs e)
        {
            TrainingDialog dlg = new TrainingDialog(ref _DataSet);

            dlg.Owner = this;
            dlg.ShowDialog();
        }