// Example #1
        /// <summary>
        /// Builds a fully-connected network with <paramref name="layerCount"/> hidden layers of
        /// <paramref name="layerSize"/> neurons between the input and the output layer.
        /// </summary>
        /// <param name="inputSize">Number of values fed into the network.</param>
        /// <param name="layerSize">Neurons per hidden layer (unused when <paramref name="layerCount"/> is 0).</param>
        /// <param name="layerCount">Number of hidden layers; must be non-negative.</param>
        /// <param name="outputSize">Number of values the network produces.</param>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when <paramref name="layerCount"/> is negative.
        /// </exception>
        public Network(int inputSize, int layerSize, int layerCount, int outputSize)
        {
            // Validate first. The original threw from a trailing else-branch and passed the
            // message text as the paramName argument — and the message itself named the wrong
            // parameter ("Input size" while the guarded value is the layer count).
            if (layerCount < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(layerCount), layerCount,
                    "Layer count cannot be negative");
            }

            InputSize  = inputSize;
            OutputSize = outputSize;
            LayerCount = layerCount;

            if (layerCount > 0)
            {
                // First hidden layer consumes the raw input vector.
                HiddernLayers.Add(new NeuronLayer(layerSize, InputSize));

                // Each remaining hidden layer consumes the previous hidden layer's output.
                for (int x = 1; x < LayerCount; x++)
                {
                    HiddernLayers.Add(new NeuronLayer(layerSize, layerSize));
                }

                LastLayer = new NeuronLayer(OutputSize, layerSize);
            }
            else
            {
                // No hidden layers: the output layer reads the input directly.
                LastLayer = new NeuronLayer(OutputSize, InputSize);
            }
        }
// Example #2
        /// <summary>
        /// Feeds <paramref name="input"/> forward through all hidden layers and the output layer.
        /// </summary>
        /// <param name="input">Input vector; must contain exactly <see cref="InputSize"/> elements.</param>
        /// <returns>The output layer's resulting output vector.</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when the input vector length does not match <see cref="InputSize"/>.
        /// </exception>
        public Vector Propergate(Vector input)
        {
            // Validate input size. The original threw a bare exception with no parameter
            // name or message, which makes the failure hard to diagnose at the call site.
            if (input.Count != InputSize)
            {
                throw new ArgumentOutOfRangeException(nameof(input), input.Count,
                    "Input vector length must equal InputSize");
            }

            if (LayerCount == 0) // easy case
            {
                // No hidden layers: the output layer reads the input directly.
                LastLayer.Input = input;
            }
            else // move data through layers
            {
                // Chain each hidden layer's output into the next layer's input.
                HiddernLayers[0].Input = input;

                for (int currentLayer = 1; currentLayer < LayerCount; currentLayer++)
                {
                    HiddernLayers[currentLayer].Input = HiddernLayers[currentLayer - 1].Output;
                }

                LastLayer.Input = HiddernLayers.Last().Output;
            }

            return LastLayer.Output;
        }
// Example #3
        /// <summary>
        /// Runs one training step: forward pass, error back-propagation, then a weight
        /// correction on every layer.
        /// </summary>
        /// <param name="input">Training input; must contain exactly <see cref="InputSize"/> elements.</param>
        /// <param name="requiredOutput">Target output vector for this input.</param>
        /// <param name="learningSpeed">Learning rate applied to every layer's correction.</param>
        /// <returns>The mean absolute error between the required and the actual output.</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when the input vector length does not match <see cref="InputSize"/>.
        /// </exception>
        public double TrainToData(Vector input, Vector requiredOutput, double learningSpeed)
        {
            // Validate input size (Propergate checks too, but failing here names this method's
            // argument). The original threw a bare exception with no parameter name or message.
            if (input.Count != InputSize)
            {
                throw new ArgumentOutOfRangeException(nameof(input), input.Count,
                    "Input vector length must equal InputSize");
            }

            Vector result = Propergate(input);

            // Compute the output error once; the original rebuilt (requiredOutput - result)
            // a second time for the return value. It seeds back-propagation and the metric.
            Vector error = requiredOutput - result;
            LastLayer.Correction = error;

            // Back-propagate corrections from the output layer toward the first hidden layer.
            // (The original used an empty if-branch for LayerCount == 0 with the work in the
            // else; with no hidden layers there is simply nothing to propagate.)
            if (LayerCount > 0)
            {
                HiddernLayers.Last().Correction = LastLayer.BackPropergation;

                for (int currentLayer = LayerCount - 2; currentLayer >= 0; currentLayer--)
                {
                    HiddernLayers[currentLayer].Correction = HiddernLayers[currentLayer + 1].BackPropergation;
                }
            }

            // Apply the corrections only after every layer's Correction vector is in place.
            LastLayer.CorrectForError(learningSpeed);
            HiddernLayers.ForEach(layer => layer.CorrectForError(learningSpeed));

            return error.Average(correction => System.Math.Abs(correction));
        }