Example #1
0
            /// <summary>
            /// Computes this neuron's activation: the dot product of
            /// <c>Weights</c> and <c>Inputs</c>, plus <c>Bias</c> when the bias
            /// mode is enabled, passed through the activation function selected
            /// by <c>ActivationFunction</c> (1 = sigmoid, 2 = linear).
            /// </summary>
            /// <returns>The activated output value of the neuron.</returns>
            public double Output()
            {
                // Weighted sum over all input/weight pairs.
                double sum = 0;
                for (int index = 0; index < Weights.Length; index++)
                {
                    sum += Weights[index] * Inputs[index];
                }

                // Bias term is only applied when globally enabled.
                if (bias == MLP.Bias.biasOn)
                {
                    sum += Bias;
                }

                // Apply the configured activation function; any other id
                // leaves the raw weighted sum unchanged (same as original).
                switch (ActivationFunction)
                {
                    case 1:
                        sum = Sigm.Function(sum);
                        break;

                    case 2:
                        sum = Linear.Function(sum);
                        break;
                }

                return sum;
            }
Example #2
0
        static int trainingSetNumber         = 2; // Selects which training-data set to load; read only by the aproximation variant.
        #endregion

        /// <summary>
        /// Entry point: trains a small one-hidden-layer MLP and serializes
        /// per-epoch MSE statistics to XML files. Two modes, selected by the
        /// static <c>variant</c> field:
        /// transformation - 4-in/4-out auto-association (each output's target
        /// is the corresponding input component); aproximation - 1-in/1-out
        /// function approximation with a linear output neuron.
        /// </summary>
        public static void Main(String[] args)
        {
            Random gen = new Random();

            if (variant == Variant.transformation)
            {
                double succesfulOutputsCount = 0;
                double totalOutputsCount     = 4 * executionsCount; // 4 test patterns per execution
                for (int counter = 1; counter <= executionsCount; counter++)
                {
                    // NOTE(review): sw is never closed/disposed, so the XML serialized into it
                    // at the end of this iteration may never be flushed to disk - wrap in using.
                    string          fileName         = variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "EpochsDiffrences.xml";
                    StreamWriter    sw               = new StreamWriter(fileName);
                    XmlSerializer   xs               = new XmlSerializer(typeof(List <double>));
                    List <double>   EpochsMSEs       = new List <double>();
                    double[][]      testSamples      = LoadTrainingDataFromFileTransformation();
                    double[][]      finalInputOutput = null; // NOTE(review): never used - candidate for removal
                    List <double[]> trainingSet      = new List <double[]>();


                    RefillTrainingSet(trainingSet, testSamples);


                    Neuron[] hiddenLayer = null;
                    Neuron[] outputLayer = null;
                    InitalizeLayers(ref hiddenLayer, ref outputLayer);
                    // Online (stochastic) training: each epoch consumes the training set in
                    // random order, removing samples as they are used, then refills it.
                    for (int i = 1; i <= epochsCount; i++)
                    {
                        double EpochMSE       = 0;
                        double IterationError = 0;
                        EpochMSE = 0;
                        for (int j = trainingSet.Count; j > 0; j--)
                        {
                            IterationError = 0;
                            // Pick a random remaining sample (without replacement; see RemoveAt below).
                            int      randomIndex = gen.Next(j);
                            double[] inputs1     = trainingSet[randomIndex];
                            double[] inputs2     = new double[hiddenLayerCount];
                            // Forward pass: hidden layer first...
                            foreach (Neuron n in hiddenLayer)
                            {
                                n.Inputs = inputs1;
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                inputs2[k] = hiddenLayer[k].Output();
                            }
                            // ...then feed the hidden activations to the output layer.
                            foreach (Neuron n in outputLayer)
                            {
                                n.Inputs = inputs2;
                            }

                            // Auto-association: the target for output neuron k is the
                            // input component inputs1[k].
                            double[] outputsErrors = new double[4];
                            for (int k = 0; k < outputLayer.Length; k++)
                            {
                                outputsErrors[k] = (inputs1[k] - outputLayer[k].Output());
                                IterationError  += Pow(outputsErrors[k], 2);
                            }
                            // Backpropagation: output-layer deltas (sigmoid derivative x error)...
                            for (int k = 0; k < outputLayer.Length; k++)
                            {
                                outputLayer[k].Error = Sigm.FunctionDerivative(outputLayer[k].Output()) * (outputsErrors[k]);
                            }

                            // ...then hidden-layer deltas, summing the contributions from
                            // every output neuron this hidden neuron feeds.
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                double value = 0;
                                for (int l = 0; l < hiddenLayer[k].Weights.Length; l++)
                                {
                                    value += Sigm.FunctionDerivative(hiddenLayer[k].Output()) * outputLayer[l].Error * outputLayer[l].Weights[k];
                                }
                                hiddenLayer[k].Error = value;
                            }
                            // Apply the accumulated deltas to both layers.
                            for (int k = 0; k < outputLayer.Length; k++)
                            {
                                outputLayer[k].UpdateWeights();
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                hiddenLayer[k].UpdateWeights();
                            }
                            trainingSet.RemoveAt(randomIndex);
                            EpochMSE += IterationError;
                        }
                        // NOTE(review): divides by the pattern count (4), not by the total number
                        // of output values - confirm this is the intended MSE definition.
                        EpochMSE /= 4;
                        RefillTrainingSet(trainingSet, testSamples);
                        // Record the MSE of every 20th epoch (1, 21, 41, ...).
                        if (i % 20 == 1)
                        {
                            EpochsMSEs.Add(EpochMSE);
                        }
                    }



                    // Evaluation over the 4 patterns: a pattern counts as successful when
                    // the strongest output exceeds 0.5 and the other three are below 0.5.
                    for (int i = 0; i < 4; i++)
                    {
                        int      maxIndex = 0;
                        double[] inputs1  = trainingSet[i];
                        double[] inputs2  = new double[hiddenLayerCount];
                        // Forward pass (same as during training, but no weight updates).
                        foreach (Neuron n in hiddenLayer)
                        {
                            n.Inputs = inputs1;
                        }
                        for (int j = 0; j < hiddenLayer.Length; j++)
                        {
                            inputs2[j] = hiddenLayer[j].Output();
                        }
                        foreach (Neuron n in outputLayer)
                        {
                            n.Inputs = inputs2;
                        }
                        // Locate the index of the strongest output neuron.
                        for (int j = 0; j < outputLayer.Length; j++)
                        {
                            if (outputLayer[j].Output() > outputLayer[maxIndex].Output())
                            {
                                maxIndex = j;
                            }
                        }
                        List <int> indexes = GetNumbers(4);
                        indexes.Remove(maxIndex); // indexes now holds the three non-winning outputs
                        for (int j = 0; j < 4; j++)
                        {
                            WriteLine($"Input: {trainingSet[i][j]}  Output: {outputLayer[j].Output()}");
                        }
                        WriteLine();
                        if (outputLayer[indexes[0]].Output() < 0.5 && outputLayer[indexes[1]].Output() < 0.5 && outputLayer[indexes[2]].Output() < 0.5 && outputLayer[maxIndex].Output() > 0.5)
                        {
                            succesfulOutputsCount++;
                        }
                    }
                    WriteLine("================================================");
                    ReadKey(); // pause so the per-execution results can be read
                    xs.Serialize(sw, EpochsMSEs);
                }
                WriteLine($"Successful: {succesfulOutputsCount}  Total: {totalOutputsCount}");
                // Persist the overall success/total counts for this run configuration.
                XmlSerializer xs1 = new XmlSerializer(typeof(double[]));
                using (StreamWriter sw1 = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution_stats.xml"))
                {
                    xs1.Serialize(sw1, new double[] { succesfulOutputsCount, totalOutputsCount });
                }
                ReadKey();
            }


            if (variant == Variant.aproximation)
            {
                for (int counter = 1; counter <= executionsCount; counter++)
                {
                    // NOTE(review): sw is never closed/disposed here either - wrap in using.
                    StreamWriter             sw                  = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "EpochsDiffrences.xml");
                    XmlSerializer            xs                  = new XmlSerializer(typeof(List <ApproximationData>));
                    List <ApproximationData> toSerialize         = new List <ApproximationData>();
                    List <double>            trainingDataInputs  = new List <double>();
                    List <double>            trainingDataOutputs = new List <double>();
                    List <double>            testingDataInputs   = new List <double>();
                    List <double>            testingDataOutputs  = new List <double>();
                    LoadTrainingDataFromFileAproximation(trainingDataInputs, trainingDataOutputs, testingDataInputs, testingDataOutputs);


                    // Topology: hiddenLayerCount single-input neurons (activation id 1),
                    // feeding one output neuron (activation id 2 = linear, per the
                    // Linear.FunctionDerivative used in backprop below).
                    Neuron[] hiddenLayer = new Neuron[hiddenLayerCount];
                    Neuron[] outputLayer = new Neuron[1];
                    for (int i = 0; i < hiddenLayer.Length; i++)
                    {
                        hiddenLayer[i] = new Neuron(1, 1);
                        hiddenLayer[i].RandomizeValues();
                    }
                    outputLayer[0] = new Neuron(hiddenLayerCount, 2);
                    outputLayer[0].RandomizeValues();
                    double TrainingMSE = 0;


                    for (int i = 1; i <= epochsCount; i++)
                    {
                        // NOTE(review): 'numbers' looks intended to drive sampling without
                        // replacement, but the value removed from it is never used as the
                        // sample index - randomIndex indexes trainingDataInputs directly,
                        // so samples can repeat within an epoch. Verify against intent
                        // (likely should be: int sampleIndex = numbers[randomIndex]).
                        List <int>    numbers     = GetNumbers(trainingDataInputs.Count);
                        List <double> finalOutput = new List <double>();
                        TrainingMSE = 0;
                        for (int j = 0; j < trainingDataInputs.Count; j++)
                        {
                            int randomIndex = gen.Next(numbers.Count);
                            numbers.RemoveAt(randomIndex);
                            double[] hiddenLayerInputs = new double[] { trainingDataInputs[randomIndex] };
                            double[] outputLayerInputs = new double[hiddenLayerCount];


                            // Forward pass.
                            foreach (Neuron n in hiddenLayer)
                            {
                                n.Inputs = hiddenLayerInputs;
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                outputLayerInputs[k] = hiddenLayer[k].Output();
                            }
                            outputLayer[0].Inputs = outputLayerInputs;


                            double diffrence = 0;
                            diffrence    = trainingDataOutputs[randomIndex] - outputLayer[0].Output();
                            TrainingMSE += Pow(diffrence, 2);

                            // Backpropagation: linear output neuron, sigmoid hidden layer.
                            outputLayer[0].Error = Linear.FunctionDerivative(outputLayer[0].Output()) * diffrence;
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                hiddenLayer[k].Error = Sigm.FunctionDerivative(hiddenLayer[k].Output()) * outputLayer[0].Error * outputLayer[0].Weights[k];
                                hiddenLayer[k].UpdateWeights();
                            }
                            outputLayer[0].UpdateWeights();
                        }


                        TrainingMSE /= trainingDataInputs.Count;
                        double TestingMSE = 0;


                        // Evaluate on the held-out testing set (no weight updates).
                        for (int j = 0; j < testingDataInputs.Count; j++)
                        {
                            double[] hiddenLayerInputs = new double[] { testingDataInputs[j] };
                            double[] outputLayerInputs = new double[hiddenLayerCount];


                            foreach (Neuron n in hiddenLayer)
                            {
                                n.Inputs = hiddenLayerInputs;
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                outputLayerInputs[k] = hiddenLayer[k].Output();
                            }
                            outputLayer[0].Inputs = outputLayerInputs;

                            TestingMSE += Pow(testingDataOutputs[j] - outputLayer[0].Output(), 2);
                            // On the final epoch, collect the network's outputs for serialization.
                            if (i == epochsCount)
                            {
                                finalOutput.Add(outputLayer[0].Output());
                            }
                        }
                        if (i == epochsCount)
                        {
                            XmlSerializer xs1 = new XmlSerializer(typeof(List <double>));
                            using (StreamWriter sw1 = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "FinalOuput.xml"))
                            {
                                xs1.Serialize(sw1, finalOutput);
                            }
                        }
                        TestingMSE /= testingDataInputs.Count;
                        // ApproximationData is used without 'new', so it is presumably a
                        // struct; both fields are definitely assigned before use.
                        ApproximationData approximationData;
                        approximationData.MSETrening = TrainingMSE;
                        approximationData.MSETest    = TestingMSE;
                        // Record every 20th epoch (1, 21, 41, ...).
                        if (i % 20 == 1)
                        {
                            toSerialize.Add(approximationData);
                        }
                    }

                    xs.Serialize(sw, toSerialize);
                }
            }
        }