Exemple #1
0
        /// <summary>
        /// Randomly perturbs every weight and bias in the network by a Gaussian
        /// amount whose standard deviation is proportional to the current value,
        /// and clears the stored momentum (previous-delta) state.
        /// </summary>
        /// <param name="scalar">Factor multiplied with each weight/bias to form the
        /// standard deviation of its perturbation.</param>
        public void Nudge(double scalar)
        {
            // Go through all of the weights and biases and augment them
            for (int l = 0; l < layerCount; l++)
            {
                for (int j = 0; j < layerSize[l]; j++)
                {
                    // Nudge the weights feeding neuron j of layer l
                    // (fan-in is the input size for the first layer, else the previous layer's size)
                    for (int i = 0; i < (l == 0 ? inputSize : layerSize[l - 1]); i++)
                    {
                        double w = weight[l][i][j];
                        // NOTE(review): w * scalar is negative when w is negative; this
                        // assumes Gaussian.GetRandomGaussian tolerates a negative
                        // standard-deviation argument — TODO confirm.
                        double u = Gaussian.GetRandomGaussian(0.0, w * scalar);
                        weight[l][i][j] += u;
                        previousWeightDelta[l][i][j] = 0.0; // reset momentum term
                    }

                    // Nudge the bias
                    double b = bias[l][j];
                    double v = Gaussian.GetRandomGaussian(0.0, b * scalar);
                    bias[l][j] += v;
                    previousBiasDelta[l][j] = 0.0; // reset momentum term
                }
            }
        }
        /// <summary>
        /// Builds a network from the given layer sizes and per-layer transfer
        /// functions, seeding every weight and bias with a Gaussian random value.
        /// </summary>
        /// <param name="layerSizes">Neuron count per layer, input layer first.</param>
        /// <param name="TransferFunctions">One transfer function per layer; the
        /// input layer's entry must be TransferFunction.None.</param>
        /// <exception cref="ArgumentException">Array lengths differ, or the input
        /// layer's transfer function is not None.</exception>
        public BackPropagationNetwork(int[] layerSizes, TransferFunction[] TransferFunctions)
        {
            // Validate the input data
            if (TransferFunctions.Length != layerSizes.Length || TransferFunctions[0] != TransferFunction.None)
            {
                throw new ArgumentException("The network cannot be created with these parameters");
            }

            // Initialize the network layers (the input layer is not stored as a layer)
            layerCount       = layerSizes.Length - 1;
            inputSize        = layerSizes[0];
            layerSize        = new int[layerCount];
            transferFunction = new TransferFunction[layerCount];

            // Copy the per-layer settings, skipping the input layer entry.
            for (int k = 0; k < layerCount; k++)
            {
                layerSize[k]        = layerSizes[k + 1];
                transferFunction[k] = TransferFunctions[k + 1];
            }

            // Dimension the per-layer arrays
            bias             = new double[layerCount][];
            previosBiasDelta = new double[layerCount][];
            delta            = new double[layerCount][];
            layerOtput       = new double[layerCount][];
            layerInput       = new double[layerCount][];

            weight              = new double[layerCount][][];
            previousWeightDelta = new double[layerCount][][];

            // Allocate the jagged arrays and initialize every element, one layer at a time.
            for (int layer = 0; layer < layerCount; layer++)
            {
                // Fan-in: input size for the first layer, otherwise the previous layer's size.
                int fanIn = layer == 0 ? inputSize : layerSize[layer - 1];

                bias[layer]             = new double[layerSize[layer]];
                previosBiasDelta[layer] = new double[layerSize[layer]];
                delta[layer]            = new double[layerSize[layer]];
                layerOtput[layer]       = new double[layerSize[layer]];
                layerInput[layer]       = new double[layerSize[layer]];

                weight[layer]              = new double[fanIn][];
                previousWeightDelta[layer] = new double[fanIn][];

                for (int src = 0; src < fanIn; src++)
                {
                    weight[layer][src]              = new double[layerSize[layer]];
                    previousWeightDelta[layer][src] = new double[layerSize[layer]];
                }

                // Gaussian biases; deltas, inputs and outputs start at zero.
                for (int n = 0; n < layerSize[layer]; n++)
                {
                    bias[layer][n]             = Gaussian.GetRandomGaussian();
                    previosBiasDelta[layer][n] = 0.0;
                    layerInput[layer][n]       = 0.0;
                    layerOtput[layer][n]       = 0.0;
                    delta[layer][n]            = 0.0;
                }

                // Gaussian connection weights with zeroed momentum terms.
                for (int src = 0; src < fanIn; src++)
                {
                    for (int dst = 0; dst < layerSize[layer]; dst++)
                    {
                        weight[layer][src][dst]              = Gaussian.GetRandomGaussian();
                        previousWeightDelta[layer][src][dst] = 0.0;
                    }
                }
            }
        }
        /// <summary>
        /// Constructs the network, wiring up one set of weights, biases and
        /// bookkeeping arrays per non-input layer, with Gaussian initial values.
        /// </summary>
        /// <param name="inputLayerSizes">Neuron counts, input layer first.</param>
        /// <param name="inputTransferFunctions">One transfer function per entry in
        /// <paramref name="inputLayerSizes"/>; the first must be None.</param>
        /// <exception cref="ArgumentException">Array lengths differ, or the first
        /// transfer function is not None.</exception>
        public BackPropagationNetwork(int[] inputLayerSizes, TransferFunction[] inputTransferFunctions)
        {
            if (inputTransferFunctions.Length != inputLayerSizes.Length)
            {
                throw new ArgumentException("There is not an equal number of layers and transfer functions.");
            }
            if (inputTransferFunctions[0] != TransferFunction.None)
            {
                throw new ArgumentException("The first transfer function must be None");
            }

            // The input layer is not counted as a layer of its own.
            LayerCount = inputLayerSizes.Length - 1;
            InputSize  = inputLayerSizes[0];
            LayerSize  = new int[LayerCount];
            TransferFunctions = new TransferFunction[LayerCount];

            // Copy per-layer settings, skipping the input layer entry.
            for (int k = 0; k < LayerCount; k++)
            {
                LayerSize[k]         = inputLayerSizes[k + 1];
                TransferFunctions[k] = inputTransferFunctions[k + 1];
            }

            // Dimension the per-layer jagged arrays.
            Bias                = new double[LayerCount][];
            PreviousBiasDelta   = new double[LayerCount][];
            Delta               = new double[LayerCount][];
            LayerOutput         = new double[LayerCount][];
            LayerInput          = new double[LayerCount][];
            Weight              = new double[LayerCount][][];
            PreviousWeightDelta = new double[LayerCount][][];

            for (int layer = 0; layer < LayerCount; layer++)
            {
                // Fan-in: input size for the first layer, otherwise the previous layer's size.
                int fanIn   = layer == 0 ? InputSize : LayerSize[layer - 1];
                int neurons = LayerSize[layer];

                Bias[layer]              = new double[neurons];
                PreviousBiasDelta[layer] = new double[neurons];
                Delta[layer]             = new double[neurons];
                LayerOutput[layer]       = new double[neurons];
                LayerInput[layer]        = new double[neurons];

                Weight[layer]              = new double[fanIn][];
                PreviousWeightDelta[layer] = new double[fanIn][];

                for (int src = 0; src < fanIn; src++)
                {
                    Weight[layer][src]              = new double[neurons];
                    PreviousWeightDelta[layer][src] = new double[neurons];
                }

                // Gaussian biases; everything else starts at zero.
                for (int n = 0; n < neurons; n++)
                {
                    Bias[layer][n]              = Gaussian.GetRandomGaussian();
                    PreviousBiasDelta[layer][n] = 0;
                    LayerOutput[layer][n]       = 0;
                    LayerInput[layer][n]        = 0;
                    Delta[layer][n]             = 0;
                }

                // Gaussian connection weights with zeroed momentum terms.
                for (int src = 0; src < fanIn; src++)
                {
                    for (int dst = 0; dst < neurons; dst++)
                    {
                        Weight[layer][src][dst]              = Gaussian.GetRandomGaussian();
                        PreviousWeightDelta[layer][src][dst] = 0;
                    }
                }
            }
        }
        public double[][][] previousWeightDelta; // stores the error portions previously attributed to the weights (momentum term)

        #endregion Private Data

        #region Constructors
        // The network is created with normally distributed random weights
        /// <summary>
        /// Creates the network with normally distributed random weights and biases.
        /// </summary>
        /// <param name="layerSizes">Neuron count per layer, including the input layer.</param>
        /// <param name="transferFunctions">Transfer function per layer; the input
        /// layer's entry must be TransferFunction.None.</param>
        /// <exception cref="ArgumentException">Thrown when the two arrays differ in
        /// length or the input layer's transfer function is not None.</exception>
        public BackPropagationNetwork(int[] layerSizes, TransferFunction[] transferFunctions)
        {
            // Validate the input data (consistent with the sibling constructors in this
            // file; without this check, mismatched arrays fail later with an opaque
            // IndexOutOfRangeException instead of a clear ArgumentException).
            if (transferFunctions.Length != layerSizes.Length || transferFunctions[0] != TransferFunction.None)
            {
                throw new ArgumentException("Cannot construct a network with these parameters.");
            }

            // Determine the network layers
            layerCount = layerSizes.Length - 1; // number of layers, excluding the input layer
            inputSize  = layerSizes[0];         // number of neurons in the input layer
            layerSize  = new int[layerCount];   // neuron count of each layer (input layer excluded)

            // Record each layer's neuron count
            for (int i = 0; i < layerCount; i++)
            {
                layerSize[i] = layerSizes[i + 1];
            }

            // Record each layer's transfer function
            transferFunction = new TransferFunction[layerCount];
            for (int i = 0; i < layerCount; i++)
            {
                transferFunction[i] = transferFunctions[i + 1];
            }

            // Dimension the per-neuron input, output, bias, delta and weight arrays
            bias = new double[layerCount][];
            previousBiasDelta = new double[layerCount][];
            delta             = new double[layerCount][];
            layerOutput       = new double[layerCount][];
            layerInput        = new double[layerCount][];

            weight = new double[layerCount][][];
            previousWeightDelta = new double[layerCount][][];

            // Fill the two-dimensional arrays
            for (int l = 0; l < layerCount; l++)
            {
                bias[l] = new double[layerSize[l]];              // one bias per neuron in layer l
                previousBiasDelta[l] = new double[layerSize[l]];
                delta[l]             = new double[layerSize[l]]; // one error term per neuron in layer l
                layerOutput[l]       = new double[layerSize[l]]; // outputs of the neurons in layer l
                layerInput[l]        = new double[layerSize[l]]; // inputs of the neurons in layer l

                // Fan-in is the input size for the first layer, else the previous layer's size
                weight[l] = new double[l == 0 ? inputSize : layerSize[l - 1]][];
                previousWeightDelta[l] = new double[l == 0 ? inputSize : layerSize[l - 1]][];

                for (int i = 0; i < (l == 0 ? inputSize : layerSize[l - 1]); i++)
                {
                    weight[l][i] = new double[layerSize[l]];
                    previousWeightDelta[l][i] = new double[layerSize[l]];
                }
            }

            // Initialize the weights
            for (int l = 0; l < layerCount; l++)
            {
                for (int j = 0; j < layerSize[l]; j++)
                {
                    bias[l][j] = Gaussian.GetRandomGaussian();
                    previousBiasDelta[l][j] = 0.0;
                    layerOutput[l][j]       = 0.0;
                    layerInput[l][j]        = 0.0;
                    delta[l][j]             = 0.0;
                }

                for (int i = 0; i < (l == 0 ? inputSize : layerSize[l - 1]); i++)
                {
                    for (int j = 0; j < layerSize[l]; j++)
                    {
                        weight[l][i][j] = Gaussian.GetRandomGaussian();
                        previousWeightDelta[l][i][j] = 0.0;
                    }
                }
            }
        }
Exemple #5
0
        /// <summary>
        /// Builds the network: validates the layer specification, sizes all
        /// bookkeeping arrays, and draws initial weights and biases from a Gaussian.
        /// </summary>
        /// <param name="layerSizes">Neuron count per layer, input layer first.</param>
        /// <param name="transferFunctions">One transfer function per layer; the
        /// input layer's entry must be TransferFunction.None.</param>
        /// <exception cref="ArgumentException">Array lengths differ, or the input
        /// layer's transfer function is not None.</exception>
        public BackPropagationNetwork(int[] layerSizes, TransferFunction[] transferFunctions)
        {
            // Validate the input data
            if (transferFunctions.Length != layerSizes.Length || transferFunctions[0] != TransferFunction.None)
            {
                throw new ArgumentException("Cannot construct a network with these parameters.");
            }

            // Initialize network layers (the input layer is not stored as a layer)
            layerCount = layerSizes.Length - 1;
            inputSize  = layerSizes[0];
            layerSize  = new int[layerCount];
            transferFunction = new TransferFunction[layerCount];

            // Copy per-layer settings, skipping the input layer entry.
            for (int k = 0; k < layerCount; k++)
            {
                layerSize[k]        = layerSizes[k + 1];
                transferFunction[k] = transferFunctions[k + 1];
            }

            // Dimension the outer jagged arrays
            bias = new double[layerCount][];
            previousBiasDelta = new double[layerCount][];
            delta             = new double[layerCount][];
            layerOutput       = new double[layerCount][];
            layerInput        = new double[layerCount][];

            weight = new double[layerCount][][];
            previousWeightDelta = new double[layerCount][][];

            // Allocate and initialize everything one layer at a time.
            for (int layer = 0; layer < layerCount; layer++)
            {
                // Fan-in: input size for the first layer, else the previous layer's size.
                int fanIn   = layer == 0 ? inputSize : layerSize[layer - 1];
                int neurons = layerSize[layer];

                bias[layer]              = new double[neurons];
                previousBiasDelta[layer] = new double[neurons];
                delta[layer]             = new double[neurons];
                layerOutput[layer]       = new double[neurons];
                layerInput[layer]        = new double[neurons];

                weight[layer]              = new double[fanIn][];
                previousWeightDelta[layer] = new double[fanIn][];

                for (int src = 0; src < fanIn; src++)
                {
                    weight[layer][src]              = new double[neurons];
                    previousWeightDelta[layer][src] = new double[neurons];
                }

                // Gaussian biases; deltas, inputs and outputs start at zero.
                for (int n = 0; n < neurons; n++)
                {
                    bias[layer][n] = Gaussian.GetRandomGaussian();
                    previousBiasDelta[layer][n] = 0.0;
                    layerOutput[layer][n]       = 0.0;
                    layerInput[layer][n]        = 0.0;
                    delta[layer][n]             = 0.0;
                }

                // Gaussian connection weights with zeroed momentum terms.
                for (int src = 0; src < fanIn; src++)
                {
                    for (int dst = 0; dst < neurons; dst++)
                    {
                        weight[layer][src][dst]              = Gaussian.GetRandomGaussian();
                        previousWeightDelta[layer][src][dst] = 0.0;
                    }
                }
            }
        }
Exemple #6
0
        /// <summary>
        /// Builds the network from per-layer sizes and transfer functions and seeds
        /// all weights and biases with Gaussian random values.
        /// </summary>
        /// <param name="layerSizes">Neuron count per layer, input layer first.</param>
        /// <param name="transferFunctions">One transfer function per layer; the
        /// input layer's entry must be TransferFunctionEnum.None.</param>
        /// <exception cref="ArgumentException">Array lengths differ, or the input
        /// layer's transfer function is not None.</exception>
        public BackPropagationNetwork(int[] layerSizes, TransferFunctionEnum[] transferFunctions)
        {
            // Input validation
            if (transferFunctions.Length != layerSizes.Length || transferFunctions[0] != TransferFunctionEnum.None)
            {
                throw new ArgumentException("Cannot construct a network with these parameters.");
            }

            // Layer bookkeeping (the input layer itself is not stored as a layer)
            this.layerCount = layerSizes.Length - 1;
            this.inputSize  = layerSizes[0];
            this.layerSize  = new int[this.layerCount];
            this.transferFunction = new TransferFunctionEnum[this.layerCount];

            // Copy per-layer settings, skipping the input layer entry.
            for (int k = 0; k < this.layerCount; k++)
            {
                this.layerSize[k]        = layerSizes[k + 1];
                this.transferFunction[k] = transferFunctions[k + 1];
            }

            // Dimension the outer jagged arrays (this variant keeps no delta array).
            this.bias = new double[this.layerCount][];
            this.previousBiasDelta = new double[this.layerCount][];
            this.layerOutput       = new double[this.layerCount][];
            this.layerInput        = new double[this.layerCount][];

            this.weight = new double[this.layerCount][][];
            this.previousWeightDelta = new double[this.layerCount][][];

            // Allocate and initialize everything one layer at a time.
            for (int l = 0; l < this.layerCount; l++)
            {
                // Fan-in: input size for the first layer, else the previous layer's size.
                int fanIn   = (l == 0) ? this.inputSize : this.layerSize[l - 1];
                int neurons = this.layerSize[l];

                this.bias[l] = new double[neurons];
                this.previousBiasDelta[l] = new double[neurons];
                this.layerOutput[l]       = new double[neurons];
                this.layerInput[l]        = new double[neurons];

                this.weight[l] = new double[fanIn][];
                this.previousWeightDelta[l] = new double[fanIn][];

                for (int src = 0; src < fanIn; src++)
                {
                    this.weight[l][src]              = new double[neurons];
                    this.previousWeightDelta[l][src] = new double[neurons];
                }

                // Gaussian bias per neuron; inputs and outputs start at zero.
                for (int n = 0; n < neurons; n++)
                {
                    this.bias[l][n] = Gaussian.GetRandomGaussian();
                    this.previousBiasDelta[l][n] = 0.0;
                    this.layerOutput[l][n]       = 0.0;
                    this.layerInput[l][n]        = 0.0;
                }

                // Gaussian connection weights with zeroed momentum terms.
                for (int src = 0; src < fanIn; src++)
                {
                    for (int dst = 0; dst < neurons; dst++)
                    {
                        this.weight[l][src][dst]              = Gaussian.GetRandomGaussian();
                        this.previousWeightDelta[l][src][dst] = 0.0;
                    }
                }
            }
        }