Example No. 1
 public Synapse(Neuron _input, Neuron _output)
 {
     InputNeuron  = _input;
     OutPutNeuron = _output;
     Weight       = NeuralMath.GetRandomValue(true);
     WeightDelta  = 0;
 }
 /// <summary>
 /// Update the weights of the InputSynapses to reduce the error of this neuron's output
 /// </summary>
 /// <param name="_learnRate">Learning rate of the Neural Network</param>
 /// <param name="_targetedValue">Targeted value of the output of this neuron</param>
 public IEnumerator UpdateSynapses(float _learnRate, float _targetedValue)
 {
     // Local gradient of an output neuron: sigmoid'(value) * (value - target)
     LocalGradient = NeuralMath.SigmoidDerivative(Value) * (Value - _targetedValue);
     foreach (Synapse synapse in InputSynapses)
     {
         // Gradient-descent step: stage the adjusted weight in WeightDelta
         synapse.WeightDelta = synapse.Weight - (_learnRate * (synapse.InputNeuron.Value * LocalGradient));
         // Yield so the update is spread across frames (Unity coroutine)
         yield return new WaitForEndOfFrame();
     }
 }
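Note that the loop stages the adjusted weight in WeightDelta rather than writing Weight directly. A minimal sketch of how a caller might drive the coroutine and then commit the staged values; the TrainStep wrapper, the outputNeuron field, and the public access to Weight and InputSynapses are assumptions for illustration, not part of the original source:

    // Hypothetical driver inside a MonoBehaviour that owns the output neuron
    private IEnumerator TrainStep(float learnRate, float target)
    {
        // Stage one gradient-descent step, spread across frames
        yield return StartCoroutine(outputNeuron.UpdateSynapses(learnRate, target));

        // Commit the staged weights (UpdateSynapses only fills WeightDelta)
        foreach (Synapse s in outputNeuron.InputSynapses)
        {
            s.Weight = s.WeightDelta;
        }
    }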
Example No. 3
 internal NeuralLayer(uint synapsesCount, uint neuronsCount, bool isBiasNeuron, InitializerWeights weightsInitializer, InitializerBias biasInitializer)
 {
     _SynapsesCount = synapsesCount; // incoming connections per neuron
     _NeuronsCount  = neuronsCount;
     // Weight matrix filled by the supplied initialization strategy
     _NeuronWeights = NeuralMath.InitializeNeuronWeights(_SynapsesCount, _NeuronsCount, weightsInitializer);
     if (isBiasNeuron)
     {
         // Bias weights exist only when the layer carries a bias neuron
         _BiasWeights = NeuralMath.InitializeBiasWeights(neuronsCount, biasInitializer);
     }
 }
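A hedged construction sketch, callable only from within the same assembly since the constructor is internal. The enum members Xavier and Zeros are assumed names for illustration; the InitializerWeights and InitializerBias definitions are not shown in the original:

    // 784 inputs feeding 30 neurons, with a bias neuron per output
    var hidden = new NeuralLayer(784, 30, true, InitializerWeights.Xavier, InitializerBias.Zeros);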
Example No. 4
    private void Awake()
    {
        _rng = new Rng(1234);

        DataManager.LoadFloatData();

        // Create convolution layers

        _layers = new List<ConvLayer2D>();
        var l1 = ConvLayer2D.Create(DataManager.Width, DataManager.Channels, 5, 1, 0, 4).Value;

        _layers.Add(l1);
        var l2 = ConvLayer2D.Create(l1.OutWidth, l1.NumFilters, 3, 3, 0, 4).Value;

        _layers.Add(l2);
        var l3 = ConvLayer2D.Create(l2.OutWidth, l2.NumFilters, 3, 1, 0, 4).Value;

        _layers.Add(l3);

        var last         = l3;
        int convOutCount = last.OutWidth * last.OutWidth * last.NumFilters;

        Debug.Log("Conv out neuron count: " + convOutCount);

        _fcLayer = new FCLayer(10, convOutCount);

        // Parameter initialization

        for (int i = 0; i < _layers.Count; i++)
        {
            NeuralMath.RandomGaussian(ref _rng, _layers[i].Kernel, 0f, 0.25f);
            NeuralMath.RandomGaussian(ref _rng, _layers[i].Bias, 0f, 0.1f);
        }

        NeuralMath.RandomGaussian(ref _rng, _fcLayer.Biases, 0f, 0.1f);
        NeuralMath.RandomGaussian(ref _rng, _fcLayer.Weights, 0f, 0.1f);

        // Create debug textures

        _layerTex = new List<Conv2DLayerTexture>(_layers.Count);
        for (int i = 0; i < _layers.Count; i++)
        {
            _layerTex.Add(new Conv2DLayerTexture(_layers[i]));
        }

        // Create the training structure

        _batch         = new NativeArray<int>(BatchSize, Allocator.Persistent, NativeArrayOptions.ClearMemory);
        _targetOutputs = new NativeArray<float>(OutputClassCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        _dCdO          = new NativeArray<float>(OutputClassCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        _input         = new NativeArray<float>(DataManager.ImgDims * DataManager.Channels, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
    }
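Assuming ConvLayer2D.Create takes (inputWidth, channels, kernelSize, stride, padding, filterCount) and follows the standard valid-convolution size rule, the layer widths can be checked by hand. Both the parameter order and the 28x28 input below are assumptions for illustration, not confirmed by the original source:

    // Standard output width of a convolution layer
    static int OutSize(int inSize, int kernel, int stride, int pad)
    {
        return (inSize - kernel + 2 * pad) / stride + 1;
    }

    // With a hypothetical 28x28 input (e.g. MNIST-style data):
    // l1: (28 - 5) / 1 + 1 = 24
    // l2: (24 - 3) / 3 + 1 = 8
    // l3: (8  - 3) / 1 + 1 = 6
    // convOutCount = 6 * 6 * 4 = 144 inputs into the FC layer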
Example No. 5
    private void TrainMinibatch()
    {
        UnityEngine.Profiling.Profiler.BeginSample("TrainMiniBatch");

        float avgTrainCost = 0f;

        DataManager.GetBatch(_batch, DataManager.TrainFloats, ref _rng);

        // var h = NeuralJobs.ZeroGradients(_gradientsAvg);
        var h = new JobHandle();

        for (int i = 0; i < _batch.Length; i++)
        {
            h = DataManager.CopyInput(_input, DataManager.TrainFloats, _batch[i], h);
            h = ConvolutionJobs.ForwardPass(_input, _layers, h);
            h = ConvolutionJobs.ForwardPass(_layers[_layers.Count - 1].output, _fcLayer, h);

            int targetLbl = (int)DataManager.TrainFloats.Labels[_batch[i]];
            h.Complete();
            NeuralMath.ClassToOneHot(targetLbl, _targetOutputs); // Todo: job

            // handle = NeuralJobs.BackwardsPass(_net, _gradients, _inputs, _targetOutputs, handle);
            // handle = NeuralJobs.AddGradients(_gradients, _gradientsAvg, handle);
            // h.Complete();

            // Todo: backwards pass logic now does this, don't redo, just check
            NeuralMath.Subtract(_targetOutputs, _fcLayer.Outputs, _dCdO);
            float cost = NeuralMath.Cost(_dCdO);
            avgTrainCost += cost;

            int predictedLbl = NeuralMath.ArgMax(_fcLayer.Outputs);
            Debug.Log("Prediction: " + predictedLbl);
        }

        // Update weights and biases according to averaged gradient and learning rate
        _rate = 3.0f / (float)BatchSize;
        // handle = NeuralJobs.UpdateParameters(_net, _gradientsAvg, _rate, handle);
        h.Complete(); // Todo: Is this one needed?

        _batchCount++;

        avgTrainCost /= (float)BatchSize;
        _trainingLoss = (float)System.Math.Round(avgTrainCost, 6);

        UnityEngine.Profiling.Profiler.EndSample();
    }
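The loop above flags ClassToOneHot as a candidate for jobification. A plausible implementation, shown here only as an assumption since the NeuralMath source is not part of this example: zero the buffer and set the index of the target class to one.

    using Unity.Collections;

    static void ClassToOneHot(int targetClass, NativeArray<float> oneHot)
    {
        for (int i = 0; i < oneHot.Length; i++)
        {
            oneHot[i] = (i == targetClass) ? 1f : 0f;
        }
    }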
Example No. 6
    private AICar TournamentSelection(AICar[] population)
    {
        // Draw TournamentSize competitors from the population at random
        AICar[] tournament = new AICar[TournamentSize];
        for (int i = 0; i < TournamentSize; i++)
        {
            tournament[i] = population[(int)(NeuralMath.RandomDouble() * PopulationCount)];
        }
        // The fittest competitor wins the tournament
        AICar fittestCar = tournament[0];

        for (int i = 1; i < TournamentSize; i++)
        {
            if (tournament[i].Fitness > fittestCar.Fitness)
            {
                fittestCar = tournament[i];
            }
        }
        return fittestCar;
    }
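A hedged sketch of how tournament selection typically feeds one generation of a genetic algorithm. Crossover and Mutate are assumed helpers for illustration; they are not shown in the original source:

    // Pick two parents independently, then breed and perturb a child
    AICar parentA = TournamentSelection(population);
    AICar parentB = TournamentSelection(population);
    AICar child   = Crossover(parentA, parentB); // assumed helper
    Mutate(child);                               // assumed helper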
Example No. 7
        public void GetErrorsOutputSignal()
        {
            //Arrange
            float[][] outputSignal = new float[][]
            {
                new float[] { 1 },
                new float[] { 0 },
                new float[] { 0 },
                new float[] { 1 }
            };

            float[][] actualSignal = new float[][]
            {
                new float[] { 1 },
                new float[] { 0 },
                new float[] { 0 },
                new float[] { 1 }
            };

            // GetTotalLoss collapses the per-sample errors into one scalar,
            // so identical signals should give a total MSE loss of zero
            float expected = 0f;

            //Act
            var actual = NeuralMath.GetTotalLoss(outputSignal, actualSignal, LossFunc.MSE);

            //Assert
            Assert.AreEqual(expected, actual);
        }
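For reference, a plausible MSE form of GetTotalLoss, given only as an assumption since the NeuralMath source is not included here: the mean of squared differences over every element of every sample.

    static float GetTotalLossMse(float[][] output, float[][] target)
    {
        float sum = 0f;
        int   n   = 0;
        for (int i = 0; i < output.Length; i++)
        {
            for (int j = 0; j < output[i].Length; j++)
            {
                float d = output[i][j] - target[i][j];
                sum += d * d;
                n++;
            }
        }
        return sum / n;
    }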
Example No. 8
        public float CalculateError(float[][] inputSignals, float[][] expectedSignals)
        {
            // Run a forward pass, then score its outputs against the expected signals
            var _outputSignals = OnActivationNeuralNet(inputSignals).OutputsSignalsPrevLayer;

            return NeuralMath.GetTotalLoss(_outputSignals, expectedSignals, LossFunc);
        }
 /// <summary>
 /// Calculate the value of the neuron from the weights and values of the previous layer:
 /// value = SigmoidSquish(sum of (weight * value) over every previous neuron, plus bias)
 /// </summary>
 /// <returns>The activated value of this neuron</returns>
 public float CalculateValue()
 {
     return Value = NeuralMath.SigmoidSquish(InputSynapses.Sum(s => s.Weight * s.InputNeuron.Value) + Bias);
 }
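SigmoidSquish presumably names the standard logistic sigmoid. A sketch under that assumption, together with the derivative form used by UpdateSynapses in Example No. 1; both are illustrations, not the actual NeuralMath source:

    // Logistic sigmoid: squashes any input into (0, 1)
    static float SigmoidSquish(float x)
    {
        return 1f / (1f + (float)System.Math.Exp(-x));
    }

    // Sigmoid derivative expressed in terms of the already-activated value:
    // if a = sigmoid(x), then sigmoid'(x) = a * (1 - a)
    static float SigmoidDerivative(float activated)
    {
        return activated * (1f - activated);
    }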