Example #1
        private void Test()
        {
            UnityEngine.Profiling.Profiler.BeginSample("Test");

            // Count how many test images the network classifies correctly
            int correctTestLabels = 0;

            for (int i = 0; i < DataManager.Test.NumImgs; i++)
            {
                int lbl = (int)DataManager.Test.Labels[i];

                // Copy the image into the network's input layer, run the
                // forward pass, and wait for the scheduled jobs to finish
                var handle = DataManager.CopyInput(_net.Inputs, DataManager.Test, i);
                handle = NeuralJobs.ForwardPass(_net, handle);
                handle.Complete();

                // The predicted class is the output neuron with the highest activation
                int predictedLbl = NeuralMath.ArgMax(_net.Last.Outputs);
                if (predictedLbl == lbl)
                {
                    correctTestLabels++;
                }
            }

            float accuracy = correctTestLabels / (float)DataManager.Test.NumImgs;

            Debug.Log("Test Accuracy: " + System.Math.Round(accuracy * 100f, 4) + "%");

            UnityEngine.Profiling.Profiler.EndSample();
        }
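
NeuralMath.ArgMax is not shown on this page; it simply picks the index of the most strongly activated output neuron, which is the network's predicted class. A minimal sketch of the idea, using a plain managed array (the actual project presumably operates on a NativeArray<float>, which is an assumption here):

        // Hypothetical stand-in for NeuralMath.ArgMax: returns the index of
        // the largest value, i.e. the most strongly activated output neuron.
        private static int ArgMax(float[] values)
        {
            int maxIndex = 0;
            for (int i = 1; i < values.Length; i++)
            {
                if (values[i] > values[maxIndex])
                {
                    maxIndex = i;
                }
            }
            return maxIndex;
        }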
Example #2
        private void TrainMinibatch()
        {
            UnityEngine.Profiling.Profiler.BeginSample("TrainMiniBatch");

            float avgTrainCost = 0f;

            // Draw a random minibatch of training-set indices and clear the
            // gradient accumulators left over from the previous batch
            DataManager.GetBatch(_batch, DataManager.Train, ref _rng);

            var handle = NeuralJobs.ZeroGradients(_gradientsAvg);

            for (int i = 0; i < _batch.Length; i++)
            {
                // Feed the training image forward through the network
                handle = DataManager.CopyInput(_net.Inputs, DataManager.Train, _batch[i], handle);
                handle = NeuralJobs.ForwardPass(_net, handle);

                int lbl = (int)DataManager.Train.Labels[_batch[i]];
                handle.Complete();
                NeuralMath.ClassToOneHot(lbl, _targetOutputs); // Todo: job

                // Backpropagate the error and accumulate this example's
                // gradients into the running batch total
                handle = NeuralJobs.BackwardsPass(_net, _gradients, _targetOutputs, handle);
                handle = NeuralJobs.AddGradients(_gradients, _gradientsAvg, handle);
                handle.Complete();

                // Todo: backwards pass logic now does this, don't redo, just check
                NeuralMath.Subtract(_targetOutputs, _net.Last.Outputs, _dCdO);
                float cost = NeuralMath.Cost(_dCdO);
                avgTrainCost += cost;
            }

            // Update weights and biases; rate = 3.0f / BatchSize folds the
            // 1/BatchSize averaging of the summed gradients into the learning rate
            _rate = 3.0f / (float)BatchSize;
            handle = NeuralJobs.UpdateParameters(_net, _gradientsAvg, _rate, handle);
            handle.Complete(); // Todo: Is this one needed?

            _batchCount++;

            avgTrainCost /= (float)BatchSize;
            _trainingLoss = (float)System.Math.Round(avgTrainCost, 6);

            UnityEngine.Profiling.Profiler.EndSample();
        }
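
For context, the jobified calls above amount to plain minibatch SGD with a quadratic cost: each example's gradients are summed, and the 1/BatchSize averaging is folded into the learning rate. A non-jobified sketch of the three helper steps, with all signatures assumed for illustration rather than taken from this project:

        // Hypothetical main-thread equivalents of ClassToOneHot, Cost and
        // UpdateParameters, shown over flat parameter/gradient buffers.

        // Write a one-hot target vector: 1 at the label's index, 0 elsewhere.
        private static void ClassToOneHot(int label, float[] target)
        {
            for (int i = 0; i < target.Length; i++)
            {
                target[i] = i == label ? 1f : 0f;
            }
        }

        // Standard quadratic cost from the per-output error (target - output).
        private static float Cost(float[] dCdO)
        {
            float sum = 0f;
            for (int i = 0; i < dCdO.Length; i++)
            {
                sum += dCdO[i] * dCdO[i];
            }
            return 0.5f * sum;
        }

        // SGD step: p -= rate * summedGradient. With rate = 3.0f / BatchSize,
        // the averaging factor is already baked into the rate.
        private static void UpdateParameters(float[] parameters, float[] gradientSums, float rate)
        {
            for (int i = 0; i < parameters.Length; i++)
            {
                parameters[i] -= rate * gradientSums[i];
            }
        }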