Example No. 1
        // Derivative of the activation function
        public static double DerivativeEvaluate(TransferFunction tFunc, double input)
        {
            switch (tFunc)
            {
            case TransferFunction.Linear:
                return(linear_derivative(input));

            case TransferFunction.Sigmoid:
                return(sigmoid_derivative(input));

            case TransferFunction.BipolarSigmoid:
                return(bipolarsigmoid_derivative(input));

            case TransferFunction.None:
            default:
                return(0.0);
            }
        }
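
The switch above dispatches to derivative helpers that the snippet does not include. A minimal sketch of those helpers, assuming the standard textbook forms (the project's actual bodies are not shown):

        // Illustrative helper bodies; assumed, not taken from the project.
        private static double linear_derivative(double input)
        {
            return 1.0; // d/dx (x) = 1
        }

        private static double sigmoid_derivative(double input)
        {
            double s = 1.0 / (1.0 + Math.Exp(-input));
            return s * (1.0 - s); // sigma'(x) = sigma(x) * (1 - sigma(x))
        }

        private static double bipolarsigmoid_derivative(double input)
        {
            // Bipolar sigmoid f(x) = 2 / (1 + e^-x) - 1, so f'(x) = (1 - f(x)^2) / 2
            double f = 2.0 / (1.0 + Math.Exp(-input)) - 1.0;
            return 0.5 * (1.0 - f * f);
        }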
Example No. 2
        public void Run(ref double[] input, out double[] output)
        {
            // Validate the input dimension
            if (input.Length != this.inputSize)
            {
                throw new ArgumentException("Input data is not of the correct dimension.");
            }

            // Dimension the output array
            output = new double[this.layerSize[this.layerCount - 1]];

            // Run the network
            for (int currentLayer = 0; currentLayer < this.layerCount; currentLayer++)
            {
                for (int currentNeuron = 0; currentNeuron < this.layerSize[currentLayer]; currentNeuron++)
                {
                    double sum = 0.0;

                    // Sum, over every neuron in the previous layer, the weight of its
                    // connection to the current neuron multiplied by that neuron's output
                    // (the raw network input stands in for layer output when currentLayer == 0)
                    for (int prevLayerCurrentNeuron = 0;
                         prevLayerCurrentNeuron < ((currentLayer == 0) ? inputSize : this.layerSize[currentLayer - 1]);
                         prevLayerCurrentNeuron++)
                    {
                        sum += this.weight[currentLayer][prevLayerCurrentNeuron][currentNeuron]
                               * ((currentLayer == 0) ? input[prevLayerCurrentNeuron] : this.layerOutput[currentLayer - 1][prevLayerCurrentNeuron]);
                    }

                    sum += this.bias[currentLayer][currentNeuron];

                    this.layerInput[currentLayer][currentNeuron]  = sum;
                    this.layerOutput[currentLayer][currentNeuron] = TransferFunction.GetSigmoid(this.transferFunction[currentLayer], sum);
                }
            }

            // Copy the output to the output array
            for (int i = 0; i < this.layerSize[this.layerCount - 1]; i++)
            {
                output[i] = this.layerOutput[layerCount - 1][i];
            }
        }
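
A hypothetical call, assuming the network was built with the constructor shown in Example No. 10 (a 2-3-1 layout whose input layer uses TransferFunction.None):

        int[] layerSizes = { 2, 3, 1 };
        TransferFunction[] tFuncs =
        {
            TransferFunction.None,    // the input layer carries no transfer function
            TransferFunction.Sigmoid,
            TransferFunction.Sigmoid
        };
        BackPropagationNetwork network = new BackPropagationNetwork(layerSizes, tFuncs);

        double[] input = { 0.25, 0.75 };
        double[] output;
        network.Run(ref input, out output); // output.Length == 1 for this layout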
Example No. 3
        public static double Evaluate(TransferFunction transferFunction, double input)
        {
            switch (transferFunction)
            {
            case TransferFunction.Sigmoid:
                return(Sigmoid(input));

            case TransferFunction.Linear:
                return(Linear(input));

            case TransferFunction.Gaussian:
                return(Gaussian(input));

            case TransferFunction.RationalSigmoid:
                return(RationalSigmoid(input));

            default:
                return(0.0);
            }
        }
        public static double EvaluateDerivative(TransferFunction tFunc, double input)
        {
            switch (tFunc)
            {
            case TransferFunction.Sigmoid:
                return(sigmoidDerivative(input));

            case TransferFunction.Linear:
                return(linearDerivative(input));

            case TransferFunction.Gaussian:
                return(gaussianDerivative(input));

            case TransferFunction.RationalSigmoid:
                return(rationalSigmoidDerivative(input));

            case TransferFunction.None:
            default:
                return(0.0);
            }
        }
        public static double EvaluateDerivative(TransferFunction tFunc, double input)
        {
            switch (tFunc)
            {
            case TransferFunction.Sigmoid:
                return(sigmoid_derivative(input));

            case TransferFunction.Linear:
                return(linear_derivative(input));

            case TransferFunction.Gaussian:
                return(gaussian_derivative(input));

            case TransferFunction.RationalSigmoid:
                return(rationalsigmoid_derivative(input));

            case TransferFunction.TangentHyperbolic:
                return(tangenthyperbolic_derivative(input));

            case TransferFunction.None:
            default:
                return(0.0);
            }
        }
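
The Evaluate/EvaluateDerivative overloads above reference Gaussian, rational-sigmoid, and hyperbolic-tangent helpers that are not included. The sketches below use the common textbook definitions and follow the naming of the snippets above; they are assumptions, not the projects' verified code:

        // Assumed helper bodies for the less common transfer functions.
        private static double Gaussian(double x)
        {
            return Math.Exp(-x * x); // e^(-x^2)
        }

        private static double gaussianDerivative(double x)
        {
            return -2.0 * x * Math.Exp(-x * x); // d/dx e^(-x^2) = -2x * e^(-x^2)
        }

        private static double RationalSigmoid(double x)
        {
            return x / (1.0 + Math.Sqrt(1.0 + x * x));
        }

        private static double rationalSigmoidDerivative(double x)
        {
            double val = Math.Sqrt(1.0 + x * x);
            return 1.0 / (val * (1.0 + val));
        }

        private static double tangenthyperbolic_derivative(double x)
        {
            double t = Math.Tanh(x);
            return 1.0 - t * t; // d/dx tanh(x) = 1 - tanh^2(x)
        }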
        public BackPropagationNetwork(int[] inputLayerSizes, TransferFunction[] inputTransferFunctions)
        {
            if (inputTransferFunctions.Length != inputLayerSizes.Length)
            {
                throw new ArgumentException("There is not an equal number of layers and transfer functions.");
            }
            if (inputTransferFunctions[0] != TransferFunction.None)
            {
                throw new ArgumentException("The first transfer function must be None");
            }

            LayerCount = inputLayerSizes.Length - 1;
            InputSize  = inputLayerSizes[0];
            LayerSize  = new int[LayerCount];

            for (int i = 0; i < LayerCount; i++)
            {
                LayerSize[i] = inputLayerSizes[i + 1];
            }

            TransferFunctions = new TransferFunction[LayerCount];

            for (int i = 0; i < LayerCount; i++)
            {
                TransferFunctions[i] = inputTransferFunctions[i + 1];
            }

            Bias = new double[LayerCount][];
            PreviousBiasDelta   = new double[LayerCount][];
            Delta               = new double[LayerCount][];
            LayerOutput         = new double[LayerCount][];
            LayerInput          = new double[LayerCount][];
            Weight              = new double[LayerCount][][];
            PreviousWeightDelta = new double[LayerCount][][];

            for (int l = 0; l < LayerCount; l++)
            {
                Bias[l] = new double[LayerSize[l]];
                PreviousBiasDelta[l] = new double[LayerSize[l]];
                Delta[l]             = new double[LayerSize[l]];
                LayerOutput[l]       = new double[LayerSize[l]];
                LayerInput[l]        = new double[LayerSize[l]];

                Weight[l] = new double[l == 0 ? InputSize : LayerSize[l - 1]][];
                PreviousWeightDelta[l] = new double[l == 0 ? InputSize : LayerSize[l - 1]][];

                for (int i = 0; i < (l == 0 ? InputSize : LayerSize[l - 1]); i++)
                {
                    Weight[l][i] = new double[LayerSize[l]];
                    PreviousWeightDelta[l][i] = new double[LayerSize[l]];
                }
            }

            // Initialize the weights
            for (int l = 0; l < LayerCount; l++)
            {
                for (int j = 0; j < LayerSize[l]; j++)
                {
                    Bias[l][j] = Gaussian.GetRandomGaussian();
                    PreviousBiasDelta[l][j] = 0;
                    LayerOutput[l][j]       = 0;
                    LayerInput[l][j]        = 0;
                    Delta[l][j]             = 0;
                }

                for (int i = 0; i < (l == 0 ? InputSize : LayerSize[l - 1]); i++)
                {
                    for (int j = 0; j < LayerSize[l]; j++)
                    {
                        Weight[l][i][j] = Gaussian.GetRandomGaussian();
                        PreviousWeightDelta[l][i][j] = 0;
                    }
                }
            }
        }
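
This constructor seeds the biases and weights with Gaussian.GetRandomGaussian, a helper no snippet includes. A common Box-Muller implementation is sketched below as an assumption about what that helper does:

        // Assumed implementation; the project's actual Gaussian class is not shown.
        public static class Gaussian
        {
            private static readonly Random rng = new Random();

            public static double GetRandomGaussian(double mean = 0.0, double stdDev = 1.0)
            {
                // Box-Muller transform: two uniform samples -> one normal sample
                double u1 = 1.0 - rng.NextDouble(); // in (0, 1], avoids Log(0)
                double u2 = rng.NextDouble();
                double standardNormal = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Sin(2.0 * Math.PI * u2);
                return mean + stdDev * standardNormal;
            }
        }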
        public void Load(string FilePath)
        {
            if (string.IsNullOrEmpty(FilePath))
            {
                throw new ArgumentNullException("FilePath");
            }

            _doc = new XmlDocument();
            _doc.Load(FilePath);

            string basePath = "";
            string nodePath = "";

            // Load from xml

            if (XPathValue("NeuralNetwork/@Type") != "BackPropagation")
            {
                return;
            }

            basePath = "NeuralNetwork/Parameters";

            Name = XPathValue(basePath + "/Name");

            int inputSize;

            int.TryParse(XPathValue(basePath + "/InputSize"), out inputSize);
            InputSize = inputSize;

            int layerCount;

            int.TryParse(XPathValue(basePath + "/LayerCount"), out layerCount);
            LayerCount = layerCount;

            LayerSize         = new int[layerCount];
            TransferFunctions = new TransferFunction[layerCount];

            basePath += "/Layers/Layer";

            for (int l = 0; l < layerCount; l++)
            {
                int layerSizeOfL;
                int.TryParse(XPathValue(basePath + "[@Index='" + l + "']/@Size"), out layerSizeOfL);
                LayerSize[l] = layerSizeOfL;

                TransferFunction transferFunctionOfL;
                Enum.TryParse(XPathValue(basePath + "[@Index='" + l + "']/@Type"), out transferFunctionOfL);
                TransferFunctions[l] = transferFunctionOfL;
            }

            // Parse Weights element
            for (int l = 0; l < LayerCount; l++)
            {
                basePath = "NeuralNetwork/Weights/Layer[@Index='" + l + "']/";
                for (int j = 0; j < LayerSize[l]; j++)
                {
                    nodePath = "Node[@Index='" + j + "']/@Bias";
                    double biasOfLJ;
                    double.TryParse(XPathValue(basePath + nodePath), out biasOfLJ);
                    Bias[l][j] = biasOfLJ;
                    PreviousBiasDelta[l][j] = 0;
                    LayerOutput[l][j]       = 0;
                    LayerInput[l][j]        = 0;
                    Delta[l][j]             = 0;
                }

                for (int i = 0; i < (l == 0 ? InputSize : LayerSize[l - 1]); i++)
                {
                    for (int j = 0; j < LayerSize[l]; j++)
                    {
                        nodePath = "Node[@Index='" + j + "']/Axon[@Index='" + i + "']";
                        double weightOfLij;
                        double.TryParse(XPathValue(basePath + nodePath), out weightOfLij);
                        Weight[l][i][j] = weightOfLij;
                        PreviousWeightDelta[l][i][j] = 0;
                    }
                }
            }

            // release
            _doc = null;
        }
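
Load relies on an XPathValue helper that the snippet omits. A minimal sketch over the _doc field (assuming System.Xml is imported and that the helper throws when a node is missing):

        // Assumed helper; behavior inferred from how Load uses it.
        private string XPathValue(string xPath)
        {
            XmlNode node = _doc.SelectSingleNode(xPath);
            if (node == null)
            {
                throw new ArgumentException("Cannot find specified node: " + xPath);
            }
            return node.InnerText;
        }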
Example No. 8
 /// <summary>
 /// Change the transfer function, by default Sigmoid
 /// </summary>
 /// <param name="transferFunction"></param>
 public void SetTransferFunction(TransferFunction.Function transferFunction)
 {
     func = TransferFunction.GetTransferFunction(transferFunction);
 }
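
A hypothetical usage; the host type below and the exact members of the TransferFunction.Function enum are assumptions based only on the signature and the stated Sigmoid default:

 var node = new NeuralNode(); // assumed host type exposing SetTransferFunction
 node.SetTransferFunction(TransferFunction.Function.Sigmoid); // the documented default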
Example No. 9
        public void Load(string FilePath)
        {
            if (FilePath == null)
            {
                return;
            }

            doc = new XmlDocument();
            doc.Load(FilePath);

            string BasePath = "", NodePath = "";
            double value;

            // Load from xml
            if (xPathValue("NeuralNetwork/@Type") != "BackPropagation")
            {
                return;
            }

            BasePath = "NeuralNetwork/Parameters/";

            int.TryParse(xPathValue(BasePath + "inputSize"), out inputSize);
            int.TryParse(xPathValue(BasePath + "layerCount"), out layerCount);

            layerSize        = new int[layerCount];
            transferFunction = new TransferFunction[layerCount];

            BasePath = "NeuralNetwork/Parameters/Layers/Layer";
            for (int l = 0; l < layerCount; l++)
            {
                int.TryParse(xPathValue(BasePath + "[@Index='" + l.ToString() + "']/@Size"), out layerSize[l]);
                Enum.TryParse <TransferFunction>(xPathValue(BasePath + "[@Index='" + l.ToString() + "']/@Type"), out transferFunction[l]);
            }

            // Parse the Weights element

            // Start dimensioning arrays
            bias = new double[layerCount][];
            previousBiasDelta = new double[layerCount][];
            delta             = new double[layerCount][];
            layerOutput       = new double[layerCount][];
            layerInput        = new double[layerCount][];

            weight = new double[layerCount][][];
            previousWeightDelta = new double[layerCount][][];

            // Allocate the jagged per-layer arrays
            for (int l = 0; l < layerCount; l++)
            {
                bias[l] = new double[layerSize[l]];
                previousBiasDelta[l] = new double[layerSize[l]];
                delta[l]             = new double[layerSize[l]];
                layerOutput[l]       = new double[layerSize[l]];
                layerInput[l]        = new double[layerSize[l]];

                weight[l] = new double[l == 0 ? inputSize : layerSize[l - 1]][];
                previousWeightDelta[l] = new double[l == 0 ? inputSize : layerSize[l - 1]][];

                for (int i = 0; i < (l == 0 ? inputSize : layerSize[l - 1]); i++)
                {
                    weight[l][i] = new double[layerSize[l]];
                    previousWeightDelta[l][i] = new double[layerSize[l]];
                }
            }

            // Load the biases and weights from the XML
            for (int l = 0; l < layerCount; l++)
            {
                BasePath = "NeuralNetwork/Weights/Layer[@Index='" + l.ToString() + "']/";
                for (int j = 0; j < layerSize[l]; j++)
                {
                    NodePath = "Node[@Index='" + j.ToString() + "']/@Bias";
                    double.TryParse(xPathValue(BasePath + NodePath), out value);

                    bias[l][j] = value;
                    previousBiasDelta[l][j] = 0.0;
                    layerOutput[l][j]       = 0.0;
                    layerInput[l][j]        = 0.0;
                    delta[l][j]             = 0.0;
                }

                for (int i = 0; i < (l == 0 ? inputSize : layerSize[l - 1]); i++)
                {
                    for (int j = 0; j < layerSize[l]; j++)
                    {
                        NodePath = "Node[@Index='" + j.ToString() + "']/Axon[@Index='" + i.ToString() + "']";
                        double.TryParse(xPathValue(BasePath + NodePath), out value);

                        weight[l][i][j] = value;
                        previousWeightDelta[l][i][j] = 0.0;
                    }
                }
            }

            // "release"
            doc = null;
        }
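
The XML layout this Load expects can be reconstructed from its XPath queries. The sketch below follows those queries, with illustrative values; real files may contain additional elements:

        <NeuralNetwork Type="BackPropagation">
          <Parameters>
            <inputSize>2</inputSize>
            <layerCount>2</layerCount>
            <Layers>
              <Layer Index="0" Size="3" Type="Sigmoid" />
              <Layer Index="1" Size="1" Type="Linear" />
            </Layers>
          </Parameters>
          <Weights>
            <Layer Index="0">
              <Node Index="0" Bias="0.1">
                <Axon Index="0">0.5</Axon>
                <Axon Index="1">-0.3</Axon>
              </Node>
              <!-- one Node per neuron in the layer, one Axon per incoming weight -->
            </Layer>
            <!-- one Layer element per network layer -->
          </Weights>
        </NeuralNetwork>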
Example No. 10
        public BackPropagationNetwork(int[] layerSizes, TransferFunction[] transferFunctions)
        {
            // Validate the input data
            if (transferFunctions.Length != layerSizes.Length || transferFunctions[0] != TransferFunction.None)
            {
                throw new ArgumentException("Cannot construct a network with these parameters.");
            }

            // Initialize network layers
            layerCount = layerSizes.Length - 1;
            inputSize  = layerSizes[0];
            layerSize  = new int[layerCount];

            for (int i = 0; i < layerCount; i++)
            {
                layerSize[i] = layerSizes[i + 1];
            }

            transferFunction = new TransferFunction[layerCount];
            for (int i = 0; i < layerCount; i++)
            {
                transferFunction[i] = transferFunctions[i + 1];
            }

            // Start dimensioning arrays
            bias = new double[layerCount][];
            previousBiasDelta = new double[layerCount][];
            delta             = new double[layerCount][];
            layerOutput       = new double[layerCount][];
            layerInput        = new double[layerCount][];

            weight = new double[layerCount][][];
            previousWeightDelta = new double[layerCount][][];

            // Allocate the jagged per-layer arrays
            for (int l = 0; l < layerCount; l++)
            {
                bias[l] = new double[layerSize[l]];
                previousBiasDelta[l] = new double[layerSize[l]];
                delta[l]             = new double[layerSize[l]];
                layerOutput[l]       = new double[layerSize[l]];
                layerInput[l]        = new double[layerSize[l]];

                weight[l] = new double[l == 0 ? inputSize : layerSize[l - 1]][];
                previousWeightDelta[l] = new double[l == 0 ? inputSize : layerSize[l - 1]][];

                for (int i = 0; i < (l == 0 ? inputSize : layerSize[l - 1]); i++)
                {
                    weight[l][i] = new double[layerSize[l]];
                    previousWeightDelta[l][i] = new double[layerSize[l]];
                }
            }

            // Initialize the weights
            for (int l = 0; l < layerCount; l++)
            {
                for (int j = 0; j < layerSize[l]; j++)
                {
                    bias[l][j] = Gaussian.GetRandomGaussian();
                    previousBiasDelta[l][j] = 0.0;
                    layerOutput[l][j]       = 0.0;
                    layerInput[l][j]        = 0.0;
                    delta[l][j]             = 0.0;
                }

                for (int i = 0; i < (l == 0 ? inputSize : layerSize[l - 1]); i++)
                {
                    for (int j = 0; j < layerSize[l]; j++)
                    {
                        weight[l][i][j] = Gaussian.GetRandomGaussian();
                        previousWeightDelta[l][i][j] = 0.0;
                    }
                }
            }
        }
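
A hypothetical construction showing the convention the validation enforces: layerSizes[0] is the input size, and its paired transfer function must be TransferFunction.None:

        var net = new BackPropagationNetwork(
            new[] { 4, 5, 2 }, // 4 inputs, a hidden layer of 5, 2 outputs
            new[] { TransferFunction.None, TransferFunction.Sigmoid, TransferFunction.Sigmoid });
        // weight[0] connects the 4 inputs to the 5 hidden neurons;
        // weight[1] connects the 5 hidden neurons to the 2 outputs.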