Example #1
0
        // Evaluates the trained network on the full test set and logs the
        // classification accuracy as a percentage.
        private void Test()
        {
            UnityEngine.Profiling.Profiler.BeginSample("Test");

            int numCorrect = 0;
            int numImages = DataManager.Test.NumImgs;

            for (int imgIdx = 0; imgIdx < numImages; imgIdx++)
            {
                // Schedule the input copy and forward pass, then block until
                // the jobs finish so the outputs are safe to read.
                var handle = DataManager.CopyInput(_net.Inputs, DataManager.Test, imgIdx);
                handle = NeuralJobs.ForwardPass(_net, handle);
                handle.Complete();

                int expected = (int)DataManager.Test.Labels[imgIdx];
                if (NeuralMath.ArgMax(_net.Last.Outputs) == expected)
                {
                    numCorrect++;
                }
            }

            float accuracy = numCorrect / (float)numImages;
            Debug.Log("Test Accuracy: " + System.Math.Round(accuracy * 100f, 4) + "%");

            UnityEngine.Profiling.Profiler.EndSample();
        }
Example #2
0
        // Releases all native/unmanaged resources when the component is destroyed.
        // Every container below is allocated with Allocator.Persistent in Awake,
        // so skipping any of these Dispose calls would leak native memory.
        private void OnDestroy()
        {
            DataManager.Unload();

            // Network and its gradient buffers.
            _net.Dispose();
            _gradients.Dispose();
            _gradientsAvg.Dispose();

            // Per-minibatch scratch arrays.
            _batch.Dispose();
            _targetOutputs.Dispose();
            _dCdO.Dispose();
        }
Example #3
0
        // Loads the dataset, builds and initializes the fully-connected network,
        // allocates persistent native buffers, and runs an initial evaluation.
        private void Awake()
        {
            // Keep training running even when the player window loses focus.
            Application.runInBackground = true;

            DataManager.Load();

            _rng = new Rng(1234); // fixed seed for reproducible initialization

            // Network topology: input layer sized to the data, then 40 -> 20 -> 10.
            var config = new FCNetworkConfig();
            int[] layerSizes = {
                DataManager.ImgDims * DataManager.Channels,
                40,
                20,
                10
            };
            foreach (int numNeurons in layerSizes)
            {
                config.Layers.Add(new FCLayerConfig { NumNeurons = numNeurons });
            }

            _net = new FCNetwork(config);
            NeuralUtils.Initialize(_net, ref _rng);

            // Per-sample gradients plus the running minibatch average.
            _gradients    = new FCGradients(config);
            _gradientsAvg = new FCGradients(config);

            _batch         = new NativeArray<int>(BatchSize, Allocator.Persistent, NativeArrayOptions.ClearMemory);
            _targetOutputs = new NativeArray<float>(OutputClassCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
            _dCdO          = new NativeArray<float>(OutputClassCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);

            _watch = System.Diagnostics.Stopwatch.StartNew();

            // Keep one fixed test image around for visual inspection.
            // NOTE(review): texture is hard-coded to 32x32 — presumably matches
            // DataManager.ImgDims; confirm if the dataset ever changes.
            const int testImgIdx = 8392;
            _lbl = DataManager.Test.Labels[testImgIdx];
            _img = new Texture2D(32, 32, TextureFormat.ARGB32, false, true);
            DataManager.ToTexture(DataManager.Test, testImgIdx, _img);

            Test();
        }
Example #4
0
        // Trains the network on one random minibatch: accumulates gradients over
        // every sample in the batch, then applies the averaged gradient once.
        // Job handles are chained so each scheduled job depends on the previous
        // one; the statement order here is load-bearing.
        private void TrainMinibatch()
        {
            UnityEngine.Profiling.Profiler.BeginSample("TrainMiniBatch");

            float avgTrainCost = 0f;

            // Pick a fresh random set of training-sample indices into _batch.
            DataManager.GetBatch(_batch, DataManager.Train, ref _rng);

            // Clear the gradient accumulator before summing per-sample gradients.
            var handle = NeuralJobs.ZeroGradients(_gradientsAvg);

            for (int i = 0; i < _batch.Length; i++)
            {
                // Copy the sample's input and run the forward pass, chained
                // behind whatever jobs are still in flight.
                handle = DataManager.CopyInput(_net.Inputs, DataManager.Train, _batch[i], handle);
                handle = NeuralJobs.ForwardPass(_net, handle);

                int lbl = (int)DataManager.Train.Labels[_batch[i]];
                // Must complete before writing _targetOutputs on the main thread.
                handle.Complete();
                NeuralMath.ClassToOneHot(lbl, _targetOutputs); // Todo: job

                // Backprop this sample and fold its gradients into the average.
                handle = NeuralJobs.BackwardsPass(_net, _gradients, _targetOutputs, handle);
                handle = NeuralJobs.AddGradients(_gradients, _gradientsAvg, handle);
                handle.Complete();

                // Todo: backwards pass logic now does this, don't redo, just check
                NeuralMath.Subtract(_targetOutputs, _net.Last.Outputs, _dCdO);
                float cost = NeuralMath.Cost(_dCdO);
                avgTrainCost += cost;
            }

            // Update weights and biases according to averaged gradient and learning rate
            _rate  = 3.0f / (float)BatchSize;
            handle = NeuralJobs.UpdateParameters(_net, _gradientsAvg, _rate, handle);
            handle.Complete(); // Todo: Is this one needed?

            _batchCount++;

            // Report the mean per-sample cost, rounded for display.
            avgTrainCost /= (float)BatchSize;
            _trainingLoss = (float)System.Math.Round(avgTrainCost, 6);

            UnityEngine.Profiling.Profiler.EndSample();
        }