Example #1
        private static void displayImage(TrainingExample example)
        {
            for (int i = 1; i <= 784; ++i)
            {
                if ((i - 1) % 28 == 0)
                {
                    Console.Write("\n");
                }
                Console.Write(example.input[i, 1] + ", ");
            }
            Console.Write("\n");
            string s = "";

            for (int i = 1; i <= 784; ++i)
            {
                if (example.input[i, 1] == 0)
                {
                    s += " ";
                }
                else if (example.input[i, 1] < 0.5F)
                {
                    s += ".";
                }
                else if (example.input[i, 1] <= 1.0F)
                {
                    s += "O";
                }
                if (i % 28 == 0)
                {
                    s += "\n";
                }
            }
            Console.WriteLine(s);
            Console.WriteLine("\n\n" + example.expectedOutput[1, 1] + " " + example.expectedOutput[2, 1] + " " + example.expectedOutput[3, 1] + " " + example.expectedOutput[4, 1] + " " + example.expectedOutput[5, 1] + " " + example.expectedOutput[6, 1] + " " + example.expectedOutput[7, 1] + " " + example.expectedOutput[8, 1] + " " + example.expectedOutput[9, 1] + " " + example.expectedOutput[10, 1] + " ");
        }
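The examples on this page pass TrainingExample objects around without showing the type itself. Judging from the usage in displayImage, it is a simple pair of matrices with 1-based indexing; the sketch below is a guess under that assumption: only the field names input and expectedOutput and the 784x1 / 10x1 shapes come from the code above, everything else is assumed.

        // Minimal sketch of the container displayImage expects; only the field
        // names and matrix shapes are taken from the example above, the rest is
        // an assumption.
        public class TrainingExample
        {
            public Matrix input;           // 784 x 1 pixel intensities in [0, 1]
            public Matrix expectedOutput;  // 10 x 1 one-hot label (digit 0 stored in row 10)

            public TrainingExample(Matrix input, Matrix expectedOutput)
            {
                this.input          = input;
                this.expectedOutput = expectedOutput;
            }
        }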
Example #2
        public static void DecimalBinaryExample()
        {
            int[] layers = new int[] { 2, 5, 4 };
            FeedforwardNeuralNetwork fnn = new FeedforwardNeuralNetwork(layers, 1.0F, 0.1F);

            DecimalBinaryTestNetwork(fnn);
            Matrix[] expectedOutputs =
            {
                new Matrix(new float[4, 1] { { 1 }, { 0 }, { 0 }, { 0 } }),
                new Matrix(new float[4, 1] { { 0 }, { 1 }, { 0 }, { 0 } }),
                new Matrix(new float[4, 1] { { 0 }, { 0 }, { 1 }, { 0 } }),
                new Matrix(new float[4, 1] { { 0 }, { 0 }, { 0 }, { 1 } })
            };
            Matrix[] inputs =
            {
                new Matrix(new float[2, 1] { { 0 }, { 0 } }),
                new Matrix(new float[2, 1] { { 0 }, { 1 } }),
                new Matrix(new float[2, 1] { { 1 }, { 0 } }),
                new Matrix(new float[2, 1] { { 1 }, { 1 } })
            };
            TrainingExample[] examples = new TrainingExample[4];
            for (int i = 0; i < 4; ++i)
            {
                examples[i] = new TrainingExample(inputs[i], expectedOutputs[i]);
            }
            fnn.TrainEpochs(examples, 1000);
            DecimalBinaryTestNetwork(fnn);
        }
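DecimalBinaryTestNetwork is not shown on this page. Below is a hedged sketch of what such a helper might look like, using only the Evaluate call and the 1-based Matrix indexing that appear in the other examples; the exact printing format is an assumption.

        // Hypothetical sketch of a helper like DecimalBinaryTestNetwork: feeds each
        // two-bit input through the network and prints the four output activations.
        private static void DecimalBinaryTestNetwork(FeedforwardNeuralNetwork fnn)
        {
            float[][,] inputs =
            {
                new float[,] { { 0 }, { 0 } },
                new float[,] { { 0 }, { 1 } },
                new float[,] { { 1 }, { 0 } },
                new float[,] { { 1 }, { 1 } }
            };
            foreach (float[,] inputArray in inputs)
            {
                Matrix output = fnn.Evaluate(new Matrix(inputArray));
                Console.WriteLine(output[1, 1] + " " + output[2, 1] + " " + output[3, 1] + " " + output[4, 1]);
            }
        }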
Example #3
        public void Train(TrainingExample trainingExample)
        {
            if (trainingExample.DesiredOutputs.Length != this.OutputNodes)
            {
                throw new Exception("The number of output nodes and the nodes of the example doesn't match");
            }

            this.Query(trainingExample);

            //Compute the error term for each output node
            for (int i = 0; i < this.OutputNodes; i++)
            {
                this.OutputLayer[i].Delta = Math.Abs(this.OutputLayer[i].Value - trainingExample.DesiredOutputs[i]);
            }

            for (int i = this.HiddenLayers.Count; i >= 0; i--)
            {
                //Propagate the error from the output layer back to the last hidden layer
                if (i == this.HiddenLayers.Count)
                {
                    for (int j = 0; j < HiddenLayers[i - 1].Count; j++)
                    {
                        for (int k = 0; k < this.OutputNodes; k++)
                        {
                            this.HiddenLayers[i - 1][j].Delta += Math.Abs((this.HiddenLayers[i - 1][j].Value) -
                                                                          (this.OutputLayer[k].Delta * this.HiddenLayers[i - 1][j].Dendrites[k].Weight));
                        }
                    }
                }
                //Propagate the error from the first hidden layer back to the input layer
                else if (i == 0)
                {
                    for (int j = 0; j < this.InputNodes; j++)
                    {
                        for (int k = 0; k < this.HiddenLayers[i].Count; k++)
                        {
                            this.InputLayer[j].Delta += Math.Abs((this.InputLayer[j].Value) -
                                                                 (this.HiddenLayers[i][k].Delta * this.InputLayer[j].Dendrites[k].Weight));
                        }
                    }
                }
                //Propagate the error between consecutive hidden layers
                else
                {
                    for (int j = 0; j < HiddenLayers[i - 1].Count; j++)
                    {
                        for (int k = 0; k < this.HiddenLayers[i].Count; k++)
                        {
                            this.HiddenLayers[i - 1][j].Delta += Math.Abs((this.HiddenLayers[i - 1][j].Value) -
                                                                          (this.HiddenLayers[i][k].Delta * this.HiddenLayers[i - 1][j].Dendrites[k].Weight));
                        }
                    }
                }
            }
        }
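Note that this Train method (and the Query method in Example #5) works with a different TrainingExample shape than the Matrix-based examples: it reads DesiredOutputs, InitialValues, and ResultIndex. A minimal sketch inferred from that usage follows; only the member names come from the code, the types and everything else are assumptions.

        // Sketch of the TrainingExample shape used by Train/Query; only the member
        // names come from the code above, the types are assumed.
        public class TrainingExample
        {
            public double[] InitialValues  { get; set; }  // one value per input node
            public double[] DesiredOutputs { get; set; }  // one target per output node
            public int      ResultIndex    { get; set; }  // index of the expected winning output
        }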
Example #4
        private static TrainingExample[] GetTrainingExamples()
        {
            try {
                Console.WriteLine("Searching for training datasets.");
                FileStream labelsStream = new FileStream(@"E:\Users\Alexander Weaver\My Documents\Programs\MNIST\train-labels.idx1-ubyte", FileMode.Open);
                FileStream imagesStream = new FileStream(@"E:\Users\Alexander Weaver\My Documents\Programs\MNIST\train-images.idx3-ubyte", FileMode.Open);
                Console.WriteLine("Training datasets found.");
                BinaryReader labelsReader = new BinaryReader(labelsStream);
                BinaryReader imagesReader = new BinaryReader(imagesStream);
                // The IDX header fields are big-endian; only numImages is decoded
                // byte by byte here, the remaining header values are read raw and
                // left unused.
                int magic1     = imagesReader.ReadInt32();
                int numImages  = (imagesReader.ReadByte() << 24) | (imagesReader.ReadByte() << 16) | (imagesReader.ReadByte() << 8) | (imagesReader.ReadByte());
                int numRows    = imagesReader.ReadInt32();
                int numColumns = imagesReader.ReadInt32();
                int magic2     = labelsReader.ReadInt32();
                int numLabels  = labelsReader.ReadInt32();
                Console.WriteLine("Populating training examples.");
                TrainingExample[] trainingExamples = new TrainingExample[numImages];
                //List<TrainingExample> trainingExamples = new List<TrainingExample>();
                for (int r = 0; r < numImages; ++r)
                {
                    Matrix input          = new Matrix(784, 1);
                    Matrix expectedOutput = new Matrix(10, 1);
                    for (int i = 1; i <= 784; ++i)
                    {
                        byte b = imagesReader.ReadByte();
                        input[i, 1] = (float)b / (float)256;
                    }
                    int expectedNum = labelsReader.ReadByte();
                    //One-hot encode the label with 1-based rows; digit 0 goes to row 10
                    if (expectedNum == 0)
                    {
                        expectedOutput[10, 1] = 1;
                    }
                    else
                    {
                        expectedOutput[expectedNum, 1] = 1;
                    }
                    trainingExamples[r] = new TrainingExample(input, expectedOutput);

                    /*if ((expectedNum == 2 || expectedNum == 6)) {
                     *  trainingExamples.Add(new TrainingExample(input, expectedOutput));
                     * }*/
                }
                Console.WriteLine("Training examples populated.");
                return(trainingExamples);
            } catch (Exception ex) {
                Console.WriteLine(ex.ToString());
                return(null);
            }
        }
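The MNIST IDX header fields are stored big-endian, which is why numImages is assembled byte by byte above; the remaining header fields are read with ReadInt32, come back byte-swapped on little-endian systems, and are never used. A small helper of the following form could replace the manual shifts; it is not part of the original code.

        // Reads a 4-byte big-endian integer, as used by the IDX header fields.
        private static int ReadBigEndianInt32(BinaryReader reader)
        {
            byte[] bytes = reader.ReadBytes(4);
            return (bytes[0] << 24) | (bytes[1] << 16) | (bytes[2] << 8) | bytes[3];
        }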
Example #5
        public static void LogicGatesExample()
        {
            //First layer is the input layer
            //Initialize a network with layer sizes 2, 3, 1: two inputs, one hidden layer of three nodes, one output
            int[] layers = new int[] { 2, 3, 1 };
            FeedforwardNeuralNetwork fnn = new FeedforwardNeuralNetwork(layers, 1.0F, 0.1F);
            //Train this many cycles
            int             numTrainingEpochs = 10000;
            TrainingExample ex1 = new TrainingExample(new Matrix(new float[,] { { 0 }, { 0 } }),
                                                      new Matrix(new float[,] { { 0 } }));

            TrainingExample ex2 = new TrainingExample(new Matrix(new float[,] { { 0 }, { 1 } }),
                                                      new Matrix(new float[,] { { 1 } }));

            TrainingExample ex3 = new TrainingExample(new Matrix(new float[,] { { 1 }, { 0 } }),
                                                      new Matrix(new float[,] { { 1 } }));

            TrainingExample ex4 = new TrainingExample(new Matrix(new float[,] { { 1 }, { 1 } }),
                                                      new Matrix(new float[,] { { 0 } }));

            TrainingExample[] trainingExamples = { ex1, ex2, ex3, ex4 };
            //Creating Random once outside the loop avoids reseeding it every epoch
            Random rand = new Random();
            for (int i = 0; i < numTrainingEpochs; ++i)
            {
                //Shuffles the examples, then trains the network on each of the four combinations
                trainingExamples = trainingExamples.OrderBy(x => rand.Next()).ToArray();
                for (int j = 0; j < 4; ++j)
                {
                    fnn.TrainIteration(trainingExamples[j].input, trainingExamples[j].expectedOutput, 1 - (1.0F * 0.1F / 4));
                }
            }
            //After training, evaluates the network with respect to all possible inputs
            //Prints their outputs to the console to see the result of training
            float[,] inputArray = new float[,] { { 0 }, { 0 } };
            Matrix input  = new Matrix(inputArray);
            Matrix output = fnn.Evaluate(input);

            Console.WriteLine(output[1, 1]);
            inputArray = new float[,] { { 0 }, { 1 } };
            input  = new Matrix(inputArray);
            output = fnn.Evaluate(input);
            Console.WriteLine(output[1, 1]);
            inputArray = new float[,] { { 1 }, { 0 } };
            input  = new Matrix(inputArray);
            output = fnn.Evaluate(input);
            Console.WriteLine(output[1, 1]);
            inputArray = new float[,] { { 1 }, { 1 } };
            input  = new Matrix(inputArray);
            output = fnn.Evaluate(input);
            Console.WriteLine(output[1, 1]);
        }
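The four training pairs above encode XOR, so after training the four printed values should approach 0, 1, 1, 0. Below is a hedged sketch of an explicit check; PrintXorResults is not part of the original code and relies only on the Evaluate call and Matrix constructor shown above.

        // Hypothetical helper: prints expected vs. actual network output for the
        // four XOR input combinations used in LogicGatesExample.
        private static void PrintXorResults(FeedforwardNeuralNetwork fnn)
        {
            float[][,] inputs =
            {
                new float[,] { { 0 }, { 0 } },
                new float[,] { { 0 }, { 1 } },
                new float[,] { { 1 }, { 0 } },
                new float[,] { { 1 }, { 1 } }
            };
            float[] expected = { 0F, 1F, 1F, 0F };
            for (int i = 0; i < 4; ++i)
            {
                float actual = fnn.Evaluate(new Matrix(inputs[i]))[1, 1];
                Console.WriteLine("expected " + expected[i] + ", got " + actual);
            }
        }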
        //TODO: neuron.Value must be 0 at the start of the loop / the return should probably be a bool
        public bool Query(TrainingExample trainingExample)
        {
            if (trainingExample.InitialValues.Length != this.InputLayer.Count)
            {
                throw new Exception("The number of initual values and the nodes of the input layer doesn't match");
            }

            for (int i = 0; i < trainingExample.InitialValues.Length; i++)
            {
                this.InputLayer[i].Value = trainingExample.InitialValues[i];
            }

            for (int l = 0; l < this.HiddenLayers.Count + 1; l++)
            {
                //Calculates the values of the first hidden layer from the input layer.
                if (l == 0)
                {
                    for (int i = 0; i < this.HiddenLayers[l].Count; i++)
                    {
                        this.HiddenLayers[l][i].Value = 0;
                        for (int j = 0; j < this.InputLayer.Count; j++)
                        {
                            this.HiddenLayers[l][i].Value += this.InputLayer[j].Value * this.InputLayer[j].Dendrites[i].Weight;
                        }
                        this.HiddenLayers[l][i].Value = SigmoidFunction(this.HiddenLayers[l][i].Value + this.HiddenLayers[l][i].Bias);
                    }
                }
                //Calculates the values of the output layer from the last hidden layer.
                else if (l == this.HiddenLayers.Count)
                {
                    for (int i = 0; i < this.OutputNodes; i++)
                    {
                        this.OutputLayer[i].Value = 0;
                        for (int j = 0; j < this.HiddenLayers[l - 1].Count; j++)
                        {
                            this.OutputLayer[i].Value += this.HiddenLayers[l - 1][j].Value * this.HiddenLayers[l - 1][j].Dendrites[i].Weight;
                        }
                        this.OutputLayer[i].Value = SigmoidFunction(this.OutputLayer[i].Value + this.OutputLayer[i].Bias);
                    }
                }
                else
                {
                    //Calculates the values of the remaining hidden layers from the previous hidden layer.
                    for (int i = 0; i < this.HiddenLayers[l].Count; i++)
                    {
                        this.HiddenLayers[l][i].Value = 0;
                        for (int j = 0; j < this.HiddenLayers[l - 1].Count; j++)
                        {
                            this.HiddenLayers[l][i].Value += this.HiddenLayers[l - 1][j].Value * this.HiddenLayers[l - 1][j].Dendrites[i].Weight;
                        }
                        this.HiddenLayers[l][i].Value = SigmoidFunction(this.HiddenLayers[l][i].Value + this.HiddenLayers[l][i].Bias);
                    }
                }
            }

            double[] finalOutputs = new double[this.OutputNodes];

            for (int i = 0; i < this.OutputNodes; i++)
            {
                finalOutputs[i] = this.OutputLayer[i].Value;
            }

            //Finds the index of the strongest output and compares it with the expected class
            int    maxIndex = 0;
            double aux      = 0.0;

            for (int i = 0; i < this.OutputNodes; i++)
            {
                if (finalOutputs[i] > aux)
                {
                    aux      = finalOutputs[i];
                    maxIndex = i;
                }
            }

            return(maxIndex == trainingExample.ResultIndex);
        }
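Query relies on a SigmoidFunction that is not shown on this page. Assuming it is the usual logistic activation, it would look roughly like the sketch below; this is an assumption, not the original implementation.

        // Standard logistic sigmoid, assumed to be what SigmoidFunction implements.
        private static double SigmoidFunction(double x)
        {
            return 1.0 / (1.0 + Math.Exp(-x));
        }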