Example #1
 // This constructor should only be used for the input layer
 protected LayerBase(Shape[] inputShapes, Shape outputShape, ActivationFunc activation = null)
 {
     InputShapes = inputShapes;
     OutputShape = outputShape;
     Activation  = activation;
     Name        = GenerateName();
 }
Example #2
        internal static float GetApproximateByFunction(float variableOfApproximate, ActivationFunc activationFunc)
        {
            float _result = 0;

            switch (activationFunc)
            {
            case ActivationFunc.Identity:
                _result = variableOfApproximate;
                break;

            case ActivationFunc.Sigmoid:
                _result = (float)(1 / (1 + Math.Exp(-variableOfApproximate)));
                break;

            case ActivationFunc.TanH:
                _result = (float)((Math.Exp(2 * variableOfApproximate) - 1) / (Math.Exp(2 * variableOfApproximate) + 1));
                break;

            case ActivationFunc.ReLU:
                _result = variableOfApproximate > 0 ? variableOfApproximate : 0;
                break;

            case ActivationFunc.Gaussian:
                _result = (float)Math.Exp(-Math.Pow(variableOfApproximate, 2));
                break;
            }
            return(_result);
        }
Example #3
        public INeuralNetworkBuilderFinal CreateOutputLayer(int neuronCount, ActivationFunc activationFunc)
        {
            int previousLayerNeuronCount = BuildingNet.NeuralLayers.Count == 0 ? InputCount : BuildingNet.NeuralLayers[BuildingNet.NeuralLayers.Count - 1].NeuronLength;

            BuildingNet.NeuralLayers.Add(new NeuralLayer(activationFunc, InitializationFunction, previousLayerNeuronCount, neuronCount));
            return(this);
        }
Example #4
        internal static float GetDerivativeByFunction(float variableOfDerivative, ActivationFunc activationFunc)
        {
            float _result = 0;

            switch (activationFunc)
            {
            case ActivationFunc.Identity:
                _result = 1;
                break;

            case ActivationFunc.Sigmoid:
                _result = (1 - variableOfDerivative) * variableOfDerivative;
                break;

            case ActivationFunc.TanH:
                _result = (float)(1 - Math.Pow(variableOfDerivative, 2));
                break;

            case ActivationFunc.ReLU:
                _result = variableOfDerivative > 0 ? 1 : 0;
                break;

            case ActivationFunc.Gaussian:
                _result = (float)(-2 * variableOfDerivative * Math.Exp(-Math.Pow(variableOfDerivative, 2)));
                break;
            }
            return(_result);
        }
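A note on the convention above: GetDerivativeByFunction evidently expects the activation's output rather than its raw input. The Sigmoid case computes (1 - y) * y and the TanH case 1 - y^2, which are the derivative formulas written in terms of the forward result y, and the callers below (Examples #14 and #16) pass outputSignals into it. A minimal standalone sketch verifying this for the sigmoid; all names here are hypothetical, not part of the library:

        using System;

        class DerivativeConventionCheck
        {
            static float Sigmoid(float x) => (float)(1 / (1 + Math.Exp(-x)));

            static void Main()
            {
                float x = 0.7f;
                float y = Sigmoid(x);                            // forward output
                float viaOutput = (1 - y) * y;                   // the Sigmoid case above, fed the output
                float analytic  = Sigmoid(x) * (1 - Sigmoid(x)); // true derivative s'(x) = s(x) * (1 - s(x))
                Console.WriteLine($"{viaOutput} == {analytic}"); // both ~0.2217
            }
        }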
Example #5
        public NeuralNetwork(int inputNodes, int hiddenNodes, int outputNodes, double learningRate)
        {
            // Number of nodes
            this.inputNodes  = inputNodes;
            this.hiddenNodes = hiddenNodes;
            this.outputNodes = outputNodes;

            // Learning rate
            this.learningRate = learningRate;

            // Weight coefficient matrices
            wih = np.random.normal(0.0, Math.Pow(hiddenNodes, -0.5), hiddenNodes, inputNodes);
            who = np.random.normal(0.0, Math.Pow(outputNodes, -0.5), outputNodes, hiddenNodes);

            // Activation function: here the sigmoid, 1 / (1 + e^-x)

            activationFunc = x => {
                double[,] arr = new double[x.shape[0], x.shape[1]];
                for (int i = 0; i < arr.GetLength(0); i++)
                {
                    for (int j = 0; j < arr.GetLength(1); j++)
                    {
                        arr[i, j] = 1.0 / (1 + Math.Exp(-1.0 * (double)x[i, j]));
                    }
                }
                return(arr);
            };
        }
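The np.random.normal calls above come from a NumPy-style .NET helper. For reference, a dependency-free sketch of the same initialization rule (mean 0, standard deviation pow(rows, -0.5), where rows is the receiving layer's node count), using a Box-Muller transform; everything here is a hypothetical standalone illustration:

        using System;

        class WeightInitSketch
        {
            static readonly Random Rand = new Random();

            // Box-Muller: turn two uniform samples into one standard-normal sample.
            static double NextGaussian(double mean, double stdDev)
            {
                double u1 = 1.0 - Rand.NextDouble(); // avoid Log(0)
                double u2 = Rand.NextDouble();
                double n  = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Sin(2.0 * Math.PI * u2);
                return mean + stdDev * n;
            }

            static double[,] InitWeights(int rows, int cols)
            {
                var w = new double[rows, cols];
                double stdDev = Math.Pow(rows, -0.5); // same scale rule as wih/who above
                for (int i = 0; i < rows; i++)
                    for (int j = 0; j < cols; j++)
                        w[i, j] = NextGaussian(0.0, stdDev);
                return w;
            }

            static void Main()
            {
                double[,] wih = InitWeights(100, 784);
                Console.WriteLine(wih[0, 0]);
            }
        }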
Example #6
 protected LayerBase(LayerBase[] inputLayers, Shape outputShape, ActivationFunc activation = null)
     : this(inputLayers.Select(l => l.OutputShape).ToArray(), outputShape, activation)
 {
     InputLayers.AddRange(inputLayers);
     foreach (var inLayer in inputLayers)
     {
         inLayer.OutputLayers.Add(this);
     }
 }
Example #7
 public static Matrix Map(Matrix a, ActivationFunc f)
 {
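     // Note: this maps in place; the returned matrix is the same instance as 'a'.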
     for (int i = 0; i < a.rows; i++)
     {
         for (int j = 0; j < a.cols; j++)
         {
             a.data[i, j] = f(a.data[i, j]);
         }
     }
     return(a);
 }
Example #8
        public static void UseEngine(IBackend backend, DeviceType deviceType, bool cudnn = false)
        {
            _backend = backend;
            ActFunc  = backend.GetActFunc();
            if (cudnn && deviceType != DeviceType.CUDA)
            {
                throw new ArgumentException("cuDNN works only with the CUDA device type");
            }

            UseCudnn = cudnn;
            _backend.SetDevice(deviceType);
        }
Example #9
        public float[][][] Backprop(float[][][] derivatives)
        {
            //i is depth in output
            for (int i = 0; i < Depth; i++)
            {
                //y is part of position in output
                for (int y = 0; y < OutputSideLength; y++)
                {
                    //x is part of position in output
                    for (int x = 0; x < OutputSideLength; x++)
                    {
                        PartialDerivative[i][y][x] *= (float)ActivationFunc.Derivative(ReallyLastIns[i][y][x]);
                        BiasChanges[i]             += PartialDerivative[i][y][x];
                        //j is depth in input
                        for (int j = 0; j < ExpectedInputDepth; j++)
                        {
                            //k is part of position in filter
                            for (int k = 0; k < FilterSideLength; k++)
                            {
                                int absY = y * StrideLength + k;
                                if (absY < ZeroPaddingSize || absY >= ExpectedInputWidth + ZeroPaddingSize)
                                {
                                    continue;
                                }
                                //l is part of position in filter
                                for (int l = 0; l < FilterSideLength; l++)
                                {
                                    int absX = x * StrideLength + l;
                                    if (absX < ZeroPaddingSize || absX >= ExpectedInputWidth + ZeroPaddingSize)
                                    {
                                        continue;
                                    }
                                    // Weight gradient pairs the output-depth delta (index i) with the input activation (index j).
                                    DErrorDWeight[i][j][k][l] += PartialDerivative[i][y][x] * LastIns[j][absY][absX];
                                }
                            }
                        }
                    }
                }
            }


            //TODO: Bias gradient descent.
            //Also TODO: Test.

            //https://medium.com/@2017csm1006/forward-and-backpropagation-in-convolutional-neural-network-4dfa96d7b37e

            return(PartialDerivative);
        }
Example #10
 public static ActivationFunction Create(ActivationFunc ac)
 {
     switch (ac)
     {
         case ActivationFunc.ARCTAN:
             return new ArcTan();
         case ActivationFunc.GAUSSIAN:
             return new Gaussian();
         case ActivationFunc.TANH:
             return new TanhFunction();
         case ActivationFunc.SINC:
             return new Sinc();
         default:
             return new SigmoidFunction();
     }
 }
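A one-line usage sketch of this factory (the variable name is hypothetical):

  ActivationFunction act = ActivationFunction.Create(ActivationFunc.TANH); // yields a TanhFunction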
Example #11
        public double Compute(double[] inputs)
        {
            //Will Only Run When In Debug Mode
            CheckInputLength(inputs);

            double output = 0;

            for (int i = 0; i < InputDendrites.Length; i++)
            {
                output += inputs[i] * InputDendrites[i];
            }
            Input = output + BiasValue;

            //Run It Through The Activation Function
            Output = ActivationFunc.Function(Input);
            return(Output);
        }
Example #12
        public void CrossOverAndMutate(GeneticNeuralNetwork BetterNetwork, double MutationRate, Random Rand)
        {
            Parallel.For(0, NeuralLayers.Count, i =>
            {
                //Cross Over The Neurons From Each Layer At Given Cut Off Point
                int Flip     = Rand.Next(2);
                int CutPoint = Rand.Next(NeuralLayers[i].NeuronLength);
                for (int j = Flip == 0 ? 0 : CutPoint; j < (Flip == 0 ? CutPoint : NeuralLayers[i].NeuronLength); j++)
                {
                    //Get The Neurons
                    Neuron CurrentNeuron       = NeuralLayers[i][j];
                    Neuron BetterNetworkNeuron = BetterNetwork.NeuralLayers[i][j];

                    for (int h = 0; h < CurrentNeuron.InputDendrites.Length; h++)
                    {
                        CurrentNeuron.InputDendrites[h] = BetterNetworkNeuron.InputDendrites[h];
                    }
                    CurrentNeuron.BiasValue = (BetterNetworkNeuron.BiasValue + CurrentNeuron.BiasValue) / 2;
                }

                //Mutate The Crossed Over Neurons
                for (int j = 0; j < NeuralLayers[i].NeuronLength; j++)
                {
                    ActivationFunc activationFunc = NeuralLayers[i].ActivationFunc;
                    Neuron CurrentNeuron          = NeuralLayers[i][j];
                    for (int h = 0; h < CurrentNeuron.InputDendrites.Length; h++)
                    {
                        if (Rand.NextDouble() < MutationRate)
                        {
                            Mutate(activationFunc, ref CurrentNeuron.InputDendrites[h], Rand);
                        }
                    }

                    //Mutate The Bias
                    if (Rand.NextDouble() < MutationRate)
                    {
                        Mutate(activationFunc, ref CurrentNeuron.BiasValue, Rand);
                    }
                }
            });
        }
Example #13
        private void Mutate(ActivationFunc activationFunc, ref double weight, Random Rand)
        {
            switch (Rand.Next(4))
            {
            case 0:     // randomize
                weight = Rand.NextDouble(activationFunc.DendriteMinGen, activationFunc.DendriteMaxGen);
                break;

            case 1:     // add/subtract
                weight += Rand.NextDouble(-1, 1);
                break;

            case 2:     // flip sign
                weight *= -1;
                break;

            default:
            case 3:     // scale
                weight *= Rand.NextDouble(0.5, 1.5);
                break;
            }
        }
Example #14
        internal static float[] GetDeltasOutputLayer(float[] outputSignals, float[] expectedSignals, ActivationFunc activationFunc)
        {
            int _neuronsCountThisLayer = outputSignals.Length;

            float[] _deltasErrors = new float[_neuronsCountThisLayer];
            float   _errorOutSignal;
            float   _derivativeActivFunc;

            for (int _neuron = 0; _neuron < _neuronsCountThisLayer; _neuron++)
            {
                _errorOutSignal        = expectedSignals[_neuron] - outputSignals[_neuron];
                _derivativeActivFunc   = NeuralMath.GetDerivativeByFunction(outputSignals[_neuron], activationFunc);
                _deltasErrors[_neuron] = _errorOutSignal * _derivativeActivFunc;
            }

            return(_deltasErrors);
        }
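A worked instance of this output-layer delta rule, assuming a single sigmoid neuron; the values are made up for illustration:

        using System;

        class OutputDeltaExample
        {
            static void Main()
            {
                float actual   = 0.8f; // network output
                float expected = 1.0f; // training target
                // Sigmoid derivative expressed in terms of the output, as in GetDerivativeByFunction.
                float derivative = (1 - actual) * actual;            // 0.16
                float delta      = (expected - actual) * derivative; // 0.2 * 0.16 = 0.032
                Console.WriteLine(delta);
            }
        }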
Example #15
 public void Activate()
 {
     Output = ActivationFunc.Activate(Input);
 }
Example #16
        internal static float[] GetDeltasHiddenLayer(float[] outputSignals, float[][] weightsNextLayer, float[] deltasErrorsNextLayer, ActivationFunc activationFunc)
        {
            int _neuronsCountThisLayer = outputSignals.Length;
            int _neuronsCountNextLayer = deltasErrorsNextLayer.Length;

            float[] _deltasErrors = new float[outputSignals.Length];
            float   _sumWeightsAndDeltasNextLayer;
            float   _derivativeActivFunc;

            for (int _neuron = 0; _neuron < _neuronsCountThisLayer; _neuron++)
            {
                _sumWeightsAndDeltasNextLayer = GetSumWeightsAndDeltasNextLayer(_neuron);
                _derivativeActivFunc          = NeuralMath.GetDerivativeByFunction(outputSignals[_neuron], activationFunc);
                _deltasErrors[_neuron]        = _sumWeightsAndDeltasNextLayer * _derivativeActivFunc;
            }

            return(_deltasErrors);

            float GetSumWeightsAndDeltasNextLayer(int neuronPrevLayer)
            {
                float _sumWeightsAndDeltas = 0;

                for (int _neuronNextLayer = 0; _neuronNextLayer < _neuronsCountNextLayer; _neuronNextLayer++)
                {
                    _sumWeightsAndDeltas += deltasErrorsNextLayer[_neuronNextLayer] * weightsNextLayer[neuronPrevLayer][_neuronNextLayer];
                }
                return(_sumWeightsAndDeltas);
            }
        }
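The hidden-layer rule is the same idea, except the error term is the weighted sum of the next layer's deltas. A worked instance with one hidden neuron feeding two next-layer neurons; all values are made up:

        using System;

        class HiddenDeltaExample
        {
            static void Main()
            {
                float hiddenOutput  = 0.6f;
                float[] nextDeltas  = { 0.032f, -0.015f }; // deltas of the next layer
                float[] nextWeights = { 0.5f, -0.2f };     // weights from this neuron into the next layer
                float sum = 0;
                for (int k = 0; k < nextDeltas.Length; k++)
                {
                    sum += nextDeltas[k] * nextWeights[k]; // 0.016 + 0.003 = 0.019
                }
                float delta = sum * ((1 - hiddenOutput) * hiddenOutput); // 0.019 * 0.24 = 0.00456
                Console.WriteLine(delta);
            }
        }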
Example #17
 public static double RandomInitialization(ActivationFunc activationFunc, Random rand)
 {
     return(rand.NextDouble(activationFunc.DendriteMinGen, activationFunc.DendriteMaxGen));
 }
Example #18
 public void SigmoidTest()
 {
     Assert.AreEqual(0.5D, ActivationFunc.Sigmoid(0));
 }
Example #19
 // The concept of a layer is that it is a 'black box' that supports feed-forward and backward propagation.
 // Feed forward: input Tensor -> |logic| -> output Tensor
 // Back propagation: error gradients (for its outputs) -> |learning| -> error gradients (for the preceding layer's outputs) and internal parameter deltas.
 // These error gradients are always the same size as the respective outputs and say how much each output
 // contributed to the final error.
 protected LayerBase(LayerBase inputLayer, Shape outputShape, ActivationFunc activation = null)
     : this(new [] { inputLayer.OutputShape }, outputShape, activation)
 {
     InputLayers.Add(inputLayer);
     inputLayer.OutputLayers.Add(this);
 }
Example #20
        internal static float[] GetOutputSignalOfSample(float[] inputSignal, float[][] neuronWeights, ActivationFunc activationFunc, float[] isBiasNeuron)
        {
            int _synapsesCount = neuronWeights.Length;
            int _neuronsCount  = neuronWeights[0].Length;

            float[]   _outputSignals  = new float[_neuronsCount];
            float[][] _neuronWeightsT = TransposeArray(neuronWeights);

            for (int neuron = 0; neuron < _neuronsCount; neuron++)
            {
                float _inputSignalForNeuron = 0;
                for (int synapse = 0; synapse < _synapsesCount; synapse++)
                {
                    _inputSignalForNeuron += inputSignal[synapse] * _neuronWeightsT[neuron][synapse];
                }
                // Add the bias once per neuron rather than once per synapse.
                _inputSignalForNeuron += isBiasNeuron != null ? isBiasNeuron[neuron] : 0;
                _outputSignals[neuron] = NeuralMath.GetApproximateByFunction(_inputSignalForNeuron, activationFunc);
            }

            return(_outputSignals);
        }
Example #21
 public Dense(LayerBase inputLayer, int outputs, ActivationFunc activation = null)
     : base(inputLayer, new Shape(1, outputs), activation)
 {
 }
Example #22
 // Use this constructor for the input layer only!
 public Dense(int inputs, int outputs, ActivationFunc activation = null)
     : base(new Shape(1, inputs), new Shape(1, outputs), activation)
 {
 }
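Taken together, the two Dense constructors allow chain-style wiring: the first layer is built from raw input counts, and subsequent layers take the previous layer directly. A usage sketch, not a standalone program; the layer sizes are made up:

  var first  = new Dense(784, 128);  // input-layer constructor
  var second = new Dense(first, 10); // chains off the previous layer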
Example #23
 public Merge(LayerBase[] inputLayers, Mode mergeMode, ActivationFunc activation = null)
     : base(inputLayers, inputLayers[0].OutputShape, activation)
 {
     MergeMode = mergeMode;
 }
Example #24
 // This constructor should only be used for the input layer
 public Merge(Shape[] inputShapes, Mode mergeMode, ActivationFunc activation = null)
     : base(inputShapes, inputShapes[0], activation)
 {
     MergeMode = mergeMode;
 }
Example #25
 public Neuron(ActivationFunc activationFunc, InitializationFunction initializationFunction, int inputCount)
 {
     ActivationFunc     = activationFunc;
     InputDendrites     = new double[inputCount];
     InitializationFunc = initializationFunction;
 }
Example #26
 // This constructor should only be used for the input layer
 public Lambda(Shape[] inputShapes, Shape outputShape, LambdaFunc processInputsFunc, LambdaBackpropFunc backPropOutputGradientFunc, ActivationFunc activation = null)
     : base(inputShapes, outputShape, activation)
 {
     ProcessInputsFunc          = processInputsFunc;
     BackPropOutputGradientFunc = backPropOutputGradientFunc;
 }
Example #27
 // This constructor should only be used for the input layer
 public Convolution(Shape inputShape, int filterSize, int filtersNum, int stride, ActivationFunc activation)
     : base(inputShape, GetOutShape(inputShape, filterSize, filterSize, stride, filtersNum), activation)
 {
     FilterSize = filterSize;
     FiltersNum = filtersNum;
     Stride     = stride;
 }
Example #28
 public Concatenate(LayerBase[] inputLayers, ActivationFunc activation = null)
     : base(inputLayers, new Shape(1, inputLayers.Select(x => x.OutputShape.Length).Sum()), activation)
 {
 }
Example #29
        internal static float[][] GetOutputSignalsOfAllSamples(float[][] inputSignals, float[][] neuronWeights, ActivationFunc activationFunc, float[] isBiasNeuron)
        {
            int _synapsesCount = neuronWeights.Length;
            int _neuronsCount  = neuronWeights[0].Length;

            float[][] _outputSignals  = new float[inputSignals.Length][];
            float[][] _neuronWeightsT = TransposeArray(neuronWeights);

            Parallel.For(0, inputSignals.Length, numberOfActiveDataset =>
            {
                float _inputSignalForNeuron;
                if (_outputSignals[numberOfActiveDataset] == null)
                {
                    _outputSignals[numberOfActiveDataset] = new float[_neuronsCount];
                }

                for (int neuron = 0; neuron < _neuronsCount; neuron++)
                {
                    _inputSignalForNeuron = SumAllSynapseSignals(numberOfActiveDataset, neuron, _synapsesCount);
                    _outputSignals[numberOfActiveDataset][neuron] = NeuralMath.GetApproximateByFunction(_inputSignalForNeuron, activationFunc);
                }
            });

            return(_outputSignals);

            float SumAllSynapseSignals(int currentSet, int neuron, int synapsesOfNeuron)
            {
                float _inputSignal = 0;

                for (int synapse = 0; synapse < synapsesOfNeuron; synapse++)
                {
                    _inputSignal += inputSignals[currentSet][synapse] * _neuronWeightsT[neuron][synapse];
                }
                // Add the bias once per neuron rather than once per synapse.
                _inputSignal += isBiasNeuron != null ? isBiasNeuron[neuron] : 0;
                return(_inputSignal);
            }
        }
Example #30
 public Builder SetActivationFunc(ActivationFunc activationFunc)
 {
     ActivationFunc = activationFunc;
     return(this);
 }
Example #31
 // This constructor should only be used for the input layer
 protected LayerBase(Shape inputShape, Shape outputShape, ActivationFunc activation = null)
     : this(new[] { inputShape }, outputShape, activation)
 {
 }