Example #1
        private void InitializeFirstLayer()
        {
            // Put the first sample into the nodes of the first layer (X1, X2, X3, X4 into the first-layer nodes).
            for (int i = 0; i < nodesPerLayer[0]; i++)
            {
                List<double> temp = new List<double>();
                for (int j = 0; j < list_of_features.Count; j++)
                {
                    temp.Add(list_of_features[j][0]);
                }
                Neuron t = new Neuron();
                t.setInputs(temp);
                hiddenlayers[0].Add(t);
                
            }

            // Initialize the weights of the first-layer nodes to 0.
            // Build a fresh list per neuron: if setWeights stores the reference,
            // a single shared List<double> would make every neuron alias the
            // same weight vector.
            for (int i = 0; i < nodesPerLayer[0]; i++)
            {
                List<double> tempWeights = new List<double>();
                for (int j = 0; j < list_of_features.Count; j++)
                {
                    tempWeights.Add(0);
                }
                hiddenlayers[0][i].setWeights(tempWeights);
            }
           
        }
        public double Backpropagation(double[] expectedValues)
        {
            double totalNetworkCost = 0.0;

            // Compute error for the output neurons to get the ball rolling.
            // See https://github.com/kwende/CSharpNeuralNetworkExplorations/blob/master/Explorations/SimpleMLP/Documentation/OutputNeuronErrors.png
            for (int d = 0; d < expectedValues.Length; d++)
            {
                Neuron outputNeuronBeingExamined = OutputLayer.Neurons[d];
                double expectedOutput            = expectedValues[d];
                double actualOutput = outputNeuronBeingExamined.Activation;
                double actualInput  = outputNeuronBeingExamined.TotalInput;

                double cost = _costFunction.Compute(expectedOutput, actualOutput);
                totalNetworkCost += cost;

                double errorRelativeToActivation =
                    (_costFunction.ComputeDerivativeWRTActivation(actualOutput, expectedOutput));

                double errorWrtToNeuron = errorRelativeToActivation * Math.Sigmoid.ComputeDerivative(actualInput);

                outputNeuronBeingExamined.AddError(errorWrtToNeuron);

                for (int e = 0; e < outputNeuronBeingExamined.UpstreamDendrites.Count; e++)
                {
                    Dendrite dendrite              = outputNeuronBeingExamined.UpstreamDendrites[e];
                    Neuron   upstreamNeuron        = (Neuron)dendrite.UpStreamNeuron;
                    double   errorRelativeToWeight = (errorWrtToNeuron * upstreamNeuron.Activation);

                    dendrite.AddError(errorRelativeToWeight);
                }
            }

            // Compute error for each neuron in each layer moving backwards (backprop).
            for (int d = HiddenLayers.Count - 1; d >= 0; d--)
            {
                HiddenLayer hiddenLayer = HiddenLayers[d];
                for (int e = 0; e < hiddenLayer.Neurons.Count; e++)
                {
                    Neuron thisNeuron = (Neuron)hiddenLayer.Neurons[e];
                    double dropoutBit = hiddenLayer.DropOutMask[e];

                    double input = thisNeuron.TotalInput;

                    double          errorSum            = 0.0;
                    List <Dendrite> downStreamDendrites = thisNeuron.DownstreamDendrites;

                    for (int f = 0; f < downStreamDendrites.Count; f++)
                    {
                        Dendrite currentDendrite  = downStreamDendrites[f];
                        Neuron   downStreamNeuron = currentDendrite.DownStreamNeuron;

                        double delta  = downStreamNeuron.CurrentNeuronError;
                        double weight = currentDendrite.Weight;
                        errorSum += delta * weight;
                    }

                    double errorWrtToThisNeuron = errorSum * Math.Sigmoid.ComputeDerivative(input) * dropoutBit;
                    thisNeuron.AddError(errorWrtToThisNeuron);

                    for (int f = 0; f < thisNeuron.UpstreamDendrites.Count; f++)
                    {
                        Dendrite dendrite              = thisNeuron.UpstreamDendrites[f];
                        Neuron   upstreamNeuron        = (Neuron)dendrite.UpStreamNeuron;
                        double   errorRelativeToWeight = (errorWrtToThisNeuron * upstreamNeuron.Activation);
                        dendrite.AddError(errorRelativeToWeight);
                    }
                }
            }

            return totalNetworkCost;
        }
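
The Backpropagation method above relies on a project-specific Math.Sigmoid helper for the activation derivative (note that it is passed TotalInput, the raw weighted sum). A minimal sketch of such a helper, assuming the standard logistic function (the namespace, class, and method names mirror the calls above; the bodies are assumptions rather than code from the linked repository):

        namespace Math
        {
            public static class Sigmoid
            {
                // Logistic function: sigma(x) = 1 / (1 + e^(-x)).
                public static double Compute(double x)
                {
                    return 1.0 / (1.0 + System.Math.Exp(-x));
                }

                // Derivative in terms of the raw input, matching the
                // ComputeDerivative(TotalInput) calls above:
                // sigma'(x) = sigma(x) * (1 - sigma(x)).
                public static double ComputeDerivative(double x)
                {
                    double s = Compute(x);
                    return s * (1.0 - s);
                }
            }
        }

The System.Math qualification inside the sketch is deliberate: a namespace named Math would otherwise shadow the BCL Math class.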
Example #3
        static int trainingSetNumber         = 2; // Applies to the approximation (Variant.aproximation) case
        #endregion
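
        // NOTE: The unqualified WriteLine/ReadKey/Pow calls in Main assume
        // "using static System.Console;" and "using static System.Math;"
        // directives at the top of the file (not shown in this excerpt).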

        public static void Main(String[] args)
        {
            Random gen = new Random();

            if (variant == Variant.transformation)
            {
                double successfulOutputsCount = 0;
                double totalOutputsCount     = 4 * executionsCount;
                for (int counter = 1; counter <= executionsCount; counter++)
                {
                    string          fileName         = variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "EpochsDiffrences.xml";
                    StreamWriter    sw               = new StreamWriter(fileName);
                    XmlSerializer   xs               = new XmlSerializer(typeof(List <double>));
                    List <double>   EpochsMSEs       = new List <double>();
                    double[][]      testSamples      = LoadTrainingDataFromFileTransformation();
                    List <double[]> trainingSet      = new List <double[]>();


                    RefillTrainingSet(trainingSet, testSamples);


                    Neuron[] hiddenLayer = null;
                    Neuron[] outputLayer = null;
                    InitalizeLayers(ref hiddenLayer, ref outputLayer);
                    for (int i = 1; i <= epochsCount; i++)
                    {
                        double EpochMSE       = 0;
                        double IterationError = 0;
                        for (int j = trainingSet.Count; j > 0; j--)
                        {
                            IterationError = 0;
                            int      randomIndex = gen.Next(j);
                            double[] inputs1     = trainingSet[randomIndex];
                            double[] inputs2     = new double[hiddenLayerCount];
                            foreach (Neuron n in hiddenLayer)
                            {
                                n.Inputs = inputs1;
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                inputs2[k] = hiddenLayer[k].Output();
                            }
                            foreach (Neuron n in outputLayer)
                            {
                                n.Inputs = inputs2;
                            }

                            double[] outputsErrors = new double[outputLayer.Length];
                            for (int k = 0; k < outputLayer.Length; k++)
                            {
                                outputsErrors[k] = (inputs1[k] - outputLayer[k].Output());
                                IterationError  += Pow(outputsErrors[k], 2);
                            }
                            for (int k = 0; k < outputLayer.Length; k++)
                            {
                                outputLayer[k].Error = Sigm.FunctionDerivative(outputLayer[k].Output()) * (outputsErrors[k]);
                            }

                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                // Backpropagate: sum delta_l * w_lk over every
                                // downstream (output) neuron, then scale once by
                                // the activation derivative of this hidden neuron.
                                double value = 0;
                                for (int l = 0; l < outputLayer.Length; l++)
                                {
                                    value += outputLayer[l].Error * outputLayer[l].Weights[k];
                                }
                                hiddenLayer[k].Error = Sigm.FunctionDerivative(hiddenLayer[k].Output()) * value;
                            }
                            for (int k = 0; k < outputLayer.Length; k++)
                            {
                                outputLayer[k].UpdateWeights();
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                hiddenLayer[k].UpdateWeights();
                            }
                            trainingSet.RemoveAt(randomIndex);
                            EpochMSE += IterationError;
                        }
                        EpochMSE /= 4; // average over the four training patterns
                        RefillTrainingSet(trainingSet, testSamples);
                        if (i % 20 == 1)
                        {
                            EpochsMSEs.Add(EpochMSE);
                        }
                    }



                    for (int i = 0; i < 4; i++)
                    {
                        int      maxIndex = 0;
                        double[] inputs1  = trainingSet[i];
                        double[] inputs2  = new double[hiddenLayerCount];
                        foreach (Neuron n in hiddenLayer)
                        {
                            n.Inputs = inputs1;
                        }
                        for (int j = 0; j < hiddenLayer.Length; j++)
                        {
                            inputs2[j] = hiddenLayer[j].Output();
                        }
                        foreach (Neuron n in outputLayer)
                        {
                            n.Inputs = inputs2;
                        }
                        for (int j = 0; j < outputLayer.Length; j++)
                        {
                            if (outputLayer[j].Output() > outputLayer[maxIndex].Output())
                            {
                                maxIndex = j;
                            }
                        }
                        List <int> indexes = GetNumbers(4);
                        indexes.Remove(maxIndex);
                        for (int j = 0; j < 4; j++)
                        {
                            WriteLine($"Input: {trainingSet[i][j]}  Output: {outputLayer[j].Output()}");
                        }
                        WriteLine();
                        if (outputLayer[indexes[0]].Output() < 0.5 &&
                            outputLayer[indexes[1]].Output() < 0.5 &&
                            outputLayer[indexes[2]].Output() < 0.5 &&
                            outputLayer[maxIndex].Output() > 0.5)
                        {
                            successfulOutputsCount++;
                        }
                    }
                    WriteLine("================================================");
                    ReadKey();
                    xs.Serialize(sw, EpochsMSEs);
                    sw.Close(); // flush the per-execution writer so the XML reaches disk
                }
                WriteLine($"Successful: {succesfulOutputsCount}  Total: {totalOutputsCount}");
                XmlSerializer xs1 = new XmlSerializer(typeof(double[]));
                using (StreamWriter sw1 = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution_stats.xml"))
                {
                    xs1.Serialize(sw1, new double[] { successfulOutputsCount, totalOutputsCount });
                }
                ReadKey();
            }


            if (variant == Variant.aproximation)
            {
                for (int counter = 1; counter <= executionsCount; counter++)
                {
                    StreamWriter             sw                  = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "EpochsDiffrences.xml");
                    XmlSerializer            xs                  = new XmlSerializer(typeof(List <ApproximationData>));
                    List <ApproximationData> toSerialize         = new List <ApproximationData>();
                    List <double>            trainingDataInputs  = new List <double>();
                    List <double>            trainingDataOutputs = new List <double>();
                    List <double>            testingDataInputs   = new List <double>();
                    List <double>            testingDataOutputs  = new List <double>();
                    LoadTrainingDataFromFileAproximation(trainingDataInputs, trainingDataOutputs, testingDataInputs, testingDataOutputs);


                    Neuron[] hiddenLayer = new Neuron[hiddenLayerCount];
                    Neuron[] outputLayer = new Neuron[1];
                    for (int i = 0; i < hiddenLayer.Length; i++)
                    {
                        hiddenLayer[i] = new Neuron(1, 1);
                        hiddenLayer[i].RandomizeValues();
                    }
                    outputLayer[0] = new Neuron(hiddenLayerCount, 2);
                    outputLayer[0].RandomizeValues();
                    double TrainingMSE = 0;


                    for (int i = 1; i <= epochsCount; i++)
                    {
                        List <int>    numbers     = GetNumbers(trainingDataInputs.Count);
                        List <double> finalOutput = new List <double>();
                        TrainingMSE = 0;
                        for (int j = 0; j < trainingDataInputs.Count; j++)
                        {
                            // Draw a sample without replacement via the shuffled
                            // index pool: randomIndex points into "numbers",
                            // sampleIndex into the training data.
                            int randomIndex = gen.Next(numbers.Count);
                            int sampleIndex = numbers[randomIndex];
                            numbers.RemoveAt(randomIndex);
                            double[] hiddenLayerInputs = new double[] { trainingDataInputs[sampleIndex] };
                            double[] outputLayerInputs = new double[hiddenLayerCount];


                            foreach (Neuron n in hiddenLayer)
                            {
                                n.Inputs = hiddenLayerInputs;
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                outputLayerInputs[k] = hiddenLayer[k].Output();
                            }
                            outputLayer[0].Inputs = outputLayerInputs;


                            double difference = trainingDataOutputs[sampleIndex] - outputLayer[0].Output();
                            TrainingMSE += Pow(difference, 2);

                            outputLayer[0].Error = Linear.FunctionDerivative(outputLayer[0].Output()) * difference;
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                hiddenLayer[k].Error = Sigm.FunctionDerivative(hiddenLayer[k].Output()) * outputLayer[0].Error * outputLayer[0].Weights[k];
                                hiddenLayer[k].UpdateWeights();
                            }
                            outputLayer[0].UpdateWeights();
                        }


                        TrainingMSE /= trainingDataInputs.Count;
                        double TestingMSE = 0;


                        for (int j = 0; j < testingDataInputs.Count; j++)
                        {
                            double[] hiddenLayerInputs = new double[] { testingDataInputs[j] };
                            double[] outputLayerInputs = new double[hiddenLayerCount];


                            foreach (Neuron n in hiddenLayer)
                            {
                                n.Inputs = hiddenLayerInputs;
                            }
                            for (int k = 0; k < hiddenLayer.Length; k++)
                            {
                                outputLayerInputs[k] = hiddenLayer[k].Output();
                            }
                            outputLayer[0].Inputs = outputLayerInputs;

                            TestingMSE += Pow(testingDataOutputs[j] - outputLayer[0].Output(), 2);
                            if (i == epochsCount)
                            {
                                finalOutput.Add(outputLayer[0].Output());
                            }
                        }
                        if (i == epochsCount)
                        {
                            XmlSerializer xs1 = new XmlSerializer(typeof(List <double>));
                            using (StreamWriter sw1 = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "FinalOuput.xml"))
                            {
                                xs1.Serialize(sw1, finalOutput);
                            }
                        }
                        TestingMSE /= testingDataInputs.Count;
                        ApproximationData approximationData;
                        approximationData.MSETrening = TrainingMSE;
                        approximationData.MSETest    = TestingMSE;
                        if (i % 20 == 1)
                        {
                            toSerialize.Add(approximationData);
                        }
                    }

                    xs.Serialize(sw, toSerialize);
                    sw.Close(); // flush the per-execution writer so the XML reaches disk
                }
            }
        }
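
Example #3 references a Neuron class, Sigm/Linear activation helpers, and an ApproximationData struct that are not part of the excerpt. A minimal sketch of the surface the code relies on; the field layout, the learning rate, and the meaning of the second Neuron constructor argument are assumptions for illustration, not the original implementation:

        public struct ApproximationData
        {
            // Field names taken verbatim from the usage above; XmlSerializer
            // needs the type and its fields to be public.
            public double MSETrening;
            public double MSETest;
        }

        public static class Sigm
        {
            // Derivative written in terms of the output y = sigma(x), which is
            // consistent with the example passing Output() rather than the raw input.
            public static double FunctionDerivative(double y) => y * (1.0 - y);
        }

        public static class Linear
        {
            // Identity activation: constant derivative.
            public static double FunctionDerivative(double y) => 1.0;
        }

        public class Neuron
        {
            private static readonly Random rand = new Random();
            private const double LearningRate = 0.1; // assumed value

            public double[] Inputs  { get; set; }
            public double[] Weights { get; private set; }
            public double   Error   { get; set; }

            private readonly bool useSigmoid;

            // The second constructor argument (1 for hidden neurons, 2 for the
            // output neuron in the example) is guessed to select the activation.
            public Neuron(int inputCount, int activationKind)
            {
                Weights    = new double[inputCount];
                useSigmoid = activationKind == 1;
            }

            public void RandomizeValues()
            {
                for (int i = 0; i < Weights.Length; i++)
                {
                    Weights[i] = rand.NextDouble() * 2.0 - 1.0; // uniform in [-1, 1)
                }
            }

            public double Output()
            {
                double sum = 0;
                for (int i = 0; i < Weights.Length; i++)
                {
                    sum += Weights[i] * Inputs[i];
                }
                return useSigmoid ? 1.0 / (1.0 + System.Math.Exp(-sum)) : sum;
            }

            // Plain delta-rule update: w_i += eta * error * input_i.
            public void UpdateWeights()
            {
                for (int i = 0; i < Weights.Length; i++)
                {
                    Weights[i] += LearningRate * Error * Inputs[i];
                }
            }
        }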