Example #1
/// <summary>
/// Loads the trained network from file.
/// </summary>
public void LoadNetwork()
{
    // Deserialize the stored network and assign a sigmoid activation
    // function with alpha = 1 to all of its neurons.
    network = (ActivationNetwork)Network.Load("..\\..\\network\\network.dat");
    network.SetActivationFunction(new SigmoidFunction(1));
}
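The file loaded above has to be produced beforehand with the network's Save method. A minimal sketch of that counterpart step, assuming AForge.NET's ActivationNetwork; the layer sizes (38 inputs, 10 hidden, 7 outputs) are illustrative, loosely borrowed from Example #2 rather than taken from this snippet:

    using AForge.Neuro;

    // Build a sigmoid network (layer sizes are placeholders) and persist it
    // where LoadNetwork() expects to find it.
    ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(1), 38, 10, 7);
    network.Save("..\\..\\network\\network.dat");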
Example #2
        private void DoWork(object sender, DoWorkEventArgs e)
        {
            BackgroundWorker worker = sender as BackgroundWorker;

            e.Result = new double[0];

            if (worker.CancellationPending)
            {
                e.Cancel = true;
                return;
            }

            TrainingDialog obj = (TrainingDialog)e.Argument;

            _Series++;

            #region Prepare the training data (involves copying)

            int numTrainingSets = obj._DataSet.Training.Rows.Count;

            ArrayList inputs  = new ArrayList();
            ArrayList outputs = new ArrayList();

            for (int i = 0; i < numTrainingSets; i++)
            {
                // Input data
                double[] inData = new double[_NumInputNeurons];

                inData[0]  = obj._DataSet.Training[i].LipCornerLeftX;
                inData[1]  = obj._DataSet.Training[i].LipCornerLeftY;
                inData[2]  = obj._DataSet.Training[i].LipCornerRightX;
                inData[3]  = obj._DataSet.Training[i].LipCornerRightY;
                inData[4]  = obj._DataSet.Training[i].LipUpLeftX;
                inData[5]  = obj._DataSet.Training[i].LipUpLeftY;
                inData[6]  = obj._DataSet.Training[i].LipUpCenterX;
                inData[7]  = obj._DataSet.Training[i].LipUpCenterY;
                inData[8]  = obj._DataSet.Training[i].LipUpRightX;
                inData[9]  = obj._DataSet.Training[i].LipUpRightY;
                inData[10] = obj._DataSet.Training[i].LipBottomLeftX;
                inData[11] = obj._DataSet.Training[i].LipBottomLeftY;
                inData[12] = obj._DataSet.Training[i].LipBottomCenterX;
                inData[13] = obj._DataSet.Training[i].LipBottomCenterY;
                inData[14] = obj._DataSet.Training[i].LipBottomRightX;
                inData[15] = obj._DataSet.Training[i].LipBottomRightY;
                inData[16] = obj._DataSet.Training[i].LeftEyeCenterX;
                inData[17] = obj._DataSet.Training[i].LeftEyeCenterY;
                inData[18] = obj._DataSet.Training[i].LeftLidBottomX;
                inData[19] = obj._DataSet.Training[i].LeftLidBottomY;
                inData[20] = obj._DataSet.Training[i].LeftLidCornerLeftX;
                inData[21] = obj._DataSet.Training[i].LeftLidCornerLeftY;
                inData[22] = obj._DataSet.Training[i].LeftLidCornerRightX;
                inData[23] = obj._DataSet.Training[i].LeftLidCornerRightY;
                inData[24] = obj._DataSet.Training[i].LeftLidUpX;
                inData[25] = obj._DataSet.Training[i].LeftLidUpY;
                inData[26] = obj._DataSet.Training[i].MouthCenterX;
                inData[27] = obj._DataSet.Training[i].MouthCenterY;
                inData[28] = obj._DataSet.Training[i].RightEyeCenterX;
                inData[29] = obj._DataSet.Training[i].RightEyeCenterY;
                inData[30] = obj._DataSet.Training[i].RightLidBottomX;
                inData[31] = obj._DataSet.Training[i].RightLidBottomY;
                inData[32] = obj._DataSet.Training[i].RightLidCornerLeftX;
                inData[33] = obj._DataSet.Training[i].RightLidCornerLeftY;
                inData[34] = obj._DataSet.Training[i].RightLidCornerRightX;
                inData[35] = obj._DataSet.Training[i].RightLidCornerRightY;
                inData[36] = obj._DataSet.Training[i].RightLidUpX;
                inData[37] = obj._DataSet.Training[i].RightLidUpY;

                inputs.Add(inData);

                // Output data
                double[] outData = new double[_NumOutputNeurons];

                int    eid        = obj._DataSet.Training[i].ExpressionOID;
                string expression = obj._DataSet.Expression.FindByExpressionOID(eid).Expression.ToLower();

                outData[0] = expression.Contains("anger") ? 1 : 0;
                outData[1] = expression.Contains("disg") ? 1 : 0;
                outData[2] = expression.Contains("fear") ? 1 : 0;
                outData[3] = expression.Contains("happy") ? 1 : 0;
                outData[4] = expression.Contains("neutr") ? 1 : 0;
                outData[5] = expression.Contains("sad") ? 1 : 0;
                outData[6] = expression.Contains("surp") ? 1 : 0;

                outputs.Add(outData);
            }
            #endregion

            #region Normalize datasets per input neuron
            for (int j = 0; j < _NumInputNeurons; j++)
            {
                double min = double.MaxValue;
                double max = double.MinValue;

                for (int i = 0; i < numTrainingSets; i++)
                {
                    double cur = ((double[])inputs[i])[j];

                    if (cur < min)
                    {
                        min = cur;
                    }
                    if (cur > max)
                    {
                        max = cur;
                    }
                }

                // Scale the feature to [0, 1]; skip constant columns to avoid division by zero
                if (max - min != 0)
                {
                    for (int i = 0; i < numTrainingSets; i++)
                    {
                        ((double[])inputs[i])[j] = (((double[])inputs[i])[j] - min) / (max - min);
                    }
                }
            }
            #endregion

            #region Pick random training, validation and test datasets

            // Like Mr. Schneider ;)
            int numTestDataSets       = (int)Math.Floor((double)numTrainingSets * 0.1);
            int numValidationDataSets = (int)Math.Floor((double)(numTrainingSets - numTestDataSets) * 0.2);
            int numTrainDataSets      = numTrainingSets - numTestDataSets - numValidationDataSets;

            Random rand = new Random();

            // Get random training data
            double[][] trainingInputs  = new double[numTrainDataSets][];
            double[][] trainingOutputs = new double[numTrainDataSets][];
            for (int i = 0; i < trainingInputs.GetLength(0); i++)
            {
                int idx = rand.Next(inputs.Count);
                trainingInputs[i]  = (double[])inputs[idx];
                trainingOutputs[i] = (double[])outputs[idx];
                inputs.RemoveAt(idx);   // remove by index, not by value
                outputs.RemoveAt(idx);
            }

            // Get random validation data (drawn, possibly with repeats, from the training set)
            double[][] validateInputs  = new double[numValidationDataSets][];
            double[][] validateOutputs = new double[numValidationDataSets][];
            for (int i = 0; i < validateInputs.GetLength(0); i++)
            {
                int idx = rand.Next(numTrainDataSets);
                validateInputs[i]  = trainingInputs[idx];
                validateOutputs[i] = trainingOutputs[idx];
            }

            // Get random test data
            double[][] testInputs  = new double[numTestDataSets][];
            double[][] testOutputs = new double[numTestDataSets][];
            for (int i = 0; i < testInputs.GetLength(0); i++)
            {
                int idx = rand.Next(inputs.Count);
                testInputs[i]  = (double[])inputs[idx];
                testOutputs[i] = (double[])outputs[idx];
                inputs.RemoveAt(idx);   // remove by index, not by value
                outputs.RemoveAt(idx);
            }

            #endregion

            #region Train the network and chart the error

            _Network.SetActivationFunction(new SigmoidFunction(_SigmoidAlpha));

            BackPropagationLearning teacher = new BackPropagationLearning(_Network);
            teacher.LearningRate = obj._LearningRate;
            teacher.Momentum     = obj._Momentum;

            double error         = 1;
            double maxError      = -10000.0;
            int    maxIterations = obj._MaxIterations;
            double epsilon       = obj._Epsilon;

            // Prepare the error Chart
            double[,] errorValues = new double[maxIterations, 2];

            ProgressState state = new ProgressState();

            double[] errors = new double[maxIterations];

            // The errors / errorValues buffers are sized for maxIterations epochs,
            // so the loop is bounded by maxIterations; epsilon and cancellation
            // provide early exits.
            for (int i = 0; i < maxIterations; i++)
            {
                // Early stop once the error has dropped below epsilon
                if (epsilon > 0.0 && error <= epsilon)
                {
                    break;
                }

                // Abort if the user requested it
                if (worker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }

                // Train
                error = teacher.RunEpoch(trainingInputs, trainingOutputs);

                // Store error for result
                errors[i]   = error;
                state.Error = error;

                // Plot size
                if (maxError < error)
                {
                    maxError = error;
                }

                // Plot values
                errorValues[i, 0] = i;
                errorValues[i, 1] = error;

                // Report progress
                if (i % 10 == 0 || i == (maxIterations - 1))
                {
                    state.Iteration = i + 1;
                    state.Error     = error;

                    worker.ReportProgress((int)((float)(100 * (i + 1)) / (float)maxIterations), state);
                }
            }

            e.Result = errors;


            obj._Chart.RangeX = new DoubleRange(0.0, (double)maxIterations);
            obj._Chart.RangeY = new DoubleRange(0.0, maxError);

            // add new data series to the chart
            obj._Chart.AddDataSeries("Error " + _Series, System.Drawing.Color.DarkGreen, Chart.SeriesType.ConnectedDots, 2);

            // update the chart
            obj._Chart.UpdateDataSeries("Error " + _Series, errorValues);

            #endregion
        }
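For context, DoWork is written to run on a BackgroundWorker, so the handler above only works once the worker is configured for progress reporting and cancellation. A minimal wiring sketch, assuming a _Worker field and an existing TrainingDialog instance named dialog (both hypothetical names, not shown in the example):

    using System.ComponentModel;  // BackgroundWorker, DoWorkEventArgs

    BackgroundWorker _Worker = new BackgroundWorker();
    _Worker.WorkerReportsProgress      = true;  // needed for worker.ReportProgress(...)
    _Worker.WorkerSupportsCancellation = true;  // needed for worker.CancellationPending

    _Worker.DoWork             += DoWork;
    _Worker.ProgressChanged    += (s, args) => { /* ((ProgressState)args.UserState).Error drives the UI */ };
    _Worker.RunWorkerCompleted += (s, args) => { /* args.Result holds the per-epoch error array */ };

    // The TrainingDialog instance passed here becomes e.Argument inside DoWork.
    _Worker.RunWorkerAsync(dialog);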