Example #1
        /// Generates samples with integer weights and compares the weighted result against unweighted statistics computed on the
        /// frequency-expanded data. The higher-order *sample* statistics are not expected to agree, because the weights here are
        /// reliability weights, *not* frequency weights, and the Bessel correction is applied accordingly - so don't let the construction of the test mislead you.
        public void ConsistentWithUnweighted(string dataSet)
        {
            var data    = _data[dataSet].Data.ToArray();
            var gen     = new DiscreteUniform(1, 5);
            var weights = new int[data.Length];

            gen.Samples(weights);

            var stats  = new RunningWeightedStatistics(data.Select((x, i) => System.Tuple.Create((double)weights[i], x)));
            var stats2 = new RunningStatistics();

            for (int i = 0; i < data.Length; ++i)
            {
                for (int j = 0; j < weights[i]; ++j)
                {
                    stats2.Push(data[i]);
                }
            }
            var sumWeights = weights.Sum();

            Assert.That(stats.TotalWeight, Is.EqualTo(sumWeights), "TotalWeight");
            Assert.That(stats.Count, Is.EqualTo(weights.Length), "Count");
            Assert.That(stats2.Minimum, Is.EqualTo(stats.Minimum), "Minimum");
            Assert.That(stats2.Maximum, Is.EqualTo(stats.Maximum), "Maximum");
            Assert.That(stats2.Mean, Is.EqualTo(stats.Mean).Within(1e-8), "Mean");
            Assert.That(stats2.PopulationVariance, Is.EqualTo(stats.PopulationVariance).Within(1e-9), "PopulationVariance");
            Assert.That(stats2.PopulationStandardDeviation, Is.EqualTo(stats.PopulationStandardDeviation).Within(1e-9), "PopulationStandardDeviation");
            Assert.That(stats2.PopulationSkewness, Is.EqualTo(stats.PopulationSkewness).Within(1e-8), "PopulationSkewness");
            Assert.That(stats2.PopulationKurtosis, Is.EqualTo(stats.PopulationKurtosis).Within(1e-8), "PopulationKurtosis");
        }
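A minimal standalone sketch of the point made in the summary comment above, assuming RunningStatistics and RunningWeightedStatistics come from MathNet.Numerics.Statistics and that RunningWeightedStatistics also exposes a sample-level Variance property (the constructor and the other members are the ones already used in the test): with integer reliability weights the population statistics match a frequency-expanded data set, while the Bessel-corrected sample statistics generally do not.

using System;
using System.Linq;
using MathNet.Numerics.Statistics;   // assumed namespace for the two statistics types

double[] values  = { 1.0, 2.0, 4.0 };
double[] weights = { 2.0, 1.0, 3.0 };            // integer-valued reliability weights

// Weighted accumulator, fed (weight, value) tuples as in the test above.
var weighted = new RunningWeightedStatistics(
    values.Select((x, i) => Tuple.Create(weights[i], x)));

// Unweighted accumulator over the frequency-expanded data: each value repeated w times.
var expanded = new RunningStatistics();
for (int i = 0; i < values.Length; i++)
{
    for (int j = 0; j < (int)weights[i]; j++)
    {
        expanded.Push(values[i]);
    }
}

// Population moments agree, as the assertions above check.
Console.WriteLine($"{weighted.Mean} == {expanded.Mean}");
Console.WriteLine($"{weighted.PopulationVariance} == {expanded.PopulationVariance}");

// Sample (Bessel-corrected) moments generally differ, because the reliability-weight
// correction involves the sum of squared weights, not just the total weight.
Console.WriteLine($"{weighted.Variance} vs {expanded.Variance}");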
Example #2
        public async Task TestUniformDistribution01()
        {
            const double A        = 0.0;
            const double B        = 1.0;
            const double MEAN     = 0.5 * (A + B);
            const double VARIANCE = (1.0 / 12.0) * (B - A) * (B - A);

            var stats = new RunningStatistics();
            var fra   = new FrequencyAnalysis();

            using var rng = new MultiThreadedRng();

            for (var n = 0; n < 100_000; n++)
            {
                var value = await rng.GetUniform();

                stats.Push(value);
                fra.CountThis(value);
            }

            fra.NormalizeAndPlotEvents(TestContext.WriteLine);
            fra.PlotOccurence(TestContext.WriteLine);
            TestContext.WriteLine($"mean={MEAN} vs. {stats.Mean}");
            TestContext.WriteLine($"variance={VARIANCE} vs {stats.Variance}");

            Assert.That(stats.Mean, Is.EqualTo(MEAN).Within(0.01), "Mean is out of range");
            Assert.That(stats.Variance, Is.EqualTo(VARIANCE).Within(0.001), "Variance is out of range");
        }
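For reference, the closed-form targets asserted above are MEAN = (A + B) / 2 = 0.5 and VARIANCE = (B - A)^2 / 12 = 1/12 ≈ 0.0833, so the tolerances of 0.01 and 0.001 correspond to roughly 2% and 1.2% relative error over the 100,000 samples.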
Example #3
        public async Task TestNormalDistribution01()
        {
            const float MEAN = 0.5f;
            const float STANDARD_DEVIATION = 0.2f;

            using var rng = new MultiThreadedRng();
            var dist  = new FastRng.Float.Distributions.NormalS02M05(rng);
            var stats = new RunningStatistics();
            var fra   = new FrequencyAnalysis();

            for (var n = 0; n < 100_000; n++)
            {
                var nextNumber = await dist.NextNumber();

                stats.Push(nextNumber);
                fra.CountThis(nextNumber);
            }

            fra.NormalizeAndPlotEvents(TestContext.WriteLine);

            TestContext.WriteLine($"mean={MEAN} vs. {stats.Mean}");
            TestContext.WriteLine($"variance={STANDARD_DEVIATION * STANDARD_DEVIATION} vs {stats.Variance}");

            Assert.That(stats.Mean, Is.EqualTo(MEAN).Within(0.01f), "Mean is out of range");
            Assert.That(stats.Variance, Is.EqualTo(STANDARD_DEVIATION * STANDARD_DEVIATION).Within(0.01f), "Variance is out of range");
        }
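The class name NormalS02M05 appears to encode sigma = 0.2 and mu = 0.5, matching the constants above, so the expected variance is STANDARD_DEVIATION * STANDARD_DEVIATION = 0.2^2 = 0.04; the final assertion checks the sample variance against that value to within 0.01.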
Example #4
        public static void TestSimpleStats(ISampler<double> sampler)
        {
            const int sampleCount = 20_000_000;

            RunningStatistics runningStats = new RunningStatistics();

            for (int i = 0; i < sampleCount; i++)
            {
                runningStats.Push(sampler.Sample());
            }

            Assert.True(Math.Abs(runningStats.Mean) < 0.001);
            Assert.True(Math.Abs(runningStats.StandardDeviation - 1.0) < 0.0005);
            Assert.True(Math.Abs(runningStats.Skewness) < 0.01);
            Assert.True(Math.Abs(runningStats.Kurtosis) < 0.01);
        }
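The helper above checks that a sampler produces approximately standard-normal moments over 20 million draws; the near-zero kurtosis bound suggests RunningStatistics.Kurtosis reports excess kurtosis. A self-contained sketch of the same style of check, substituting a Box-Muller generator for the ISampler<double> implementation (whose interface is not shown here) and assuming RunningStatistics comes from MathNet.Numerics.Statistics:

using System;
using MathNet.Numerics.Statistics;   // assumed source of RunningStatistics

var rng   = new Random(42);
var stats = new RunningStatistics();

for (int i = 0; i < 1_000_000; i++)
{
    // Box-Muller transform: two uniform deviates -> one standard-normal deviate.
    double u1 = 1.0 - rng.NextDouble();                 // keep u1 > 0 so Log is finite
    double u2 = rng.NextDouble();
    double z  = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
    stats.Push(z);
}

Console.WriteLine($"mean      ~ 0: {stats.Mean}");
Console.WriteLine($"std. dev. ~ 1: {stats.StandardDeviation}");
Console.WriteLine($"skewness  ~ 0: {stats.Skewness}");
Console.WriteLine($"kurtosis  ~ 0: {stats.Kurtosis}");  // near zero if this is excess kurtosis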
Example #5
        public double[] GatedMeanAndUncertainty(double startTime, double endTime)
        {
            double[] mne = new double[2];

            double[] trimmedGates = TrimGates(startTime, endTime);
            if (trimmedGates == null)
            {
                return(null);
            }
            startTime = trimmedGates[0];
            endTime   = trimmedGates[1];

            int low  = (int)Math.Ceiling((startTime - gateStartTime) / clockPeriod);
            int high = (int)Math.Floor((endTime - gateStartTime) / clockPeriod);

            // check the range is sensible
            if (low < 0)
            {
                low = 0;
            }
            if (high > this.Length - 1)
            {
                high = this.Length - 1;
            }
            if (low > high)
            {
                return(null);
            }

            RunningStatistics stats = new RunningStatistics();

            for (int i = low; i <= high; i++)
            {
                stats.Push(tofData[i]);
            }

            mne[0] = stats.Mean;
            mne[1] = stats.StandardErrorOfSampleMean;
            return(mne);
        }
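A hypothetical call site for the method above (the tof variable and the gate times are placeholders; gateStartTime, clockPeriod and tofData are private state of the containing class): the two-element result packs the gated mean and its standard error, and null signals an empty or out-of-range gate.

double[] result = tof.GatedMeanAndUncertainty(startTime: 800.0, endTime: 1200.0);
if (result != null)
{
    double gatedMean = result[0];   // mean of the tofData samples inside the gate
    double sem       = result[1];   // StandardErrorOfSampleMean of those samples
}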
Example #6
        public DemodulatedBlock DemodulateBlock(Block b, DemodulationConfig demodulationConfig, int[] tofChannelsToAnalyse)
        {
            if (!b.detectors.Contains("asymmetry"))
            {
                b.AddDetectorsToBlock();
            }

            int blockLength = b.Points.Count;

            DemodulatedBlock db = new DemodulatedBlock(b.TimeStamp, b.Config, demodulationConfig);

            Dictionary <string, double[]> pointDetectorData = new Dictionary <string, double[]>();

            foreach (string d in demodulationConfig.GatedDetectors)
            {
                pointDetectorData.Add(d, GetGatedDetectorData(b, d, demodulationConfig.Gates.GetGate(d)));
            }
            foreach (string d in demodulationConfig.PointDetectors)
            {
                pointDetectorData.Add(d, GetPointDetectorData(b, d));
            }

            Dictionary <string, TOF[]> tofDetectorData = new Dictionary <string, TOF[]>();

            foreach (string d in demodulationConfig.TOFDetectors)
            {
                tofDetectorData.Add(d, GetTOFDetectorData(b, d));
            }

            // ----Demodulate channels----
            // --Build list of modulations--
            List <Modulation> modulations = GetModulations(b);

            // --Work out switch state for each point--
            List <uint> switchStates = GetSwitchStates(modulations);

            // --Calculate state signs for each analysis channel--
            // The first index selects the analysis channel, the second the switchState
            int numStates = (int)Math.Pow(2, modulations.Count);

            int[,] stateSigns = GetStateSigns(numStates);

            // --This is done for each point/gated detector--
            foreach (string d in pointDetectorData.Keys)
            {
                int detectorIndex = b.detectors.IndexOf(d);

                // We obtain one Channel Set for each detector
                ChannelSet <PointWithError> channelSet = new ChannelSet <PointWithError>();

                // Detector calibration
                double calibration = ((TOF)((EDMPoint)b.Points[0]).Shot.TOFs[detectorIndex]).Calibration;

                // Divide points into bins depending on switch state
                List <List <double> > statePoints = new List <List <double> >(numStates);
                for (int i = 0; i < numStates; i++)
                {
                    statePoints.Add(new List <double>(blockLength / numStates));
                }
                for (int i = 0; i < blockLength; i++)
                {
                    statePoints[(int)switchStates[i]].Add(pointDetectorData[b.detectors[detectorIndex]][i]);
                }
                int subLength = blockLength / numStates;

                // For each analysis channel, calculate the mean and standard error, then add to ChannelSet
                for (int channel = 0; channel < numStates; channel++)
                {
                    RunningStatistics stats = new RunningStatistics();
                    for (int subIndex = 0; subIndex < subLength; subIndex++)
                    {
                        double onVal  = 0.0;
                        double offVal = 0.0;
                        for (int i = 0; i < numStates; i++)
                        {
                            if (stateSigns[channel, i] == 1)
                            {
                                onVal += statePoints[i][subIndex];
                            }
                            else
                            {
                                offVal += statePoints[i][subIndex];
                            }
                        }
                        onVal  /= numStates;
                        offVal /= numStates;
                        stats.Push(onVal - offVal);
                    }

                    PointWithError pointWithError = new PointWithError()
                    {
                        Value = stats.Mean, Error = stats.StandardErrorOfSampleMean
                    };

                    // add the channel to the ChannelSet
                    List <string> usedSwitches = new List <string>();
                    for (int i = 0; i < modulations.Count; i++)
                    {
                        if ((channel & (1 << i)) != 0)
                        {
                            usedSwitches.Add(modulations[i].Name);
                        }
                    }
                    string[] channelName = usedSwitches.ToArray();
                    // the SIG channel has a special name
                    if (channel == 0)
                    {
                        channelName = new string[] { "SIG" };
                    }
                    channelSet.AddChannel(channelName, pointWithError);
                }

                // Add the ChannelSet to the demodulated block
                db.AddDetector(b.detectors[detectorIndex], calibration, channelSet);
            }

            // --This is done for each TOF detector--
            foreach (string d in tofDetectorData.Keys)
            {
                int detectorIndex = b.detectors.IndexOf(d);

                // We obtain one Channel Set for each detector
                ChannelSet <TOFWithError> channelSet = new ChannelSet <TOFWithError>();

                // Detector calibration
                double calibration = ((TOF)((EDMPoint)b.Points[0]).Shot.TOFs[detectorIndex]).Calibration;

                // Divide TOFs into bins depending on switch state
                List <List <TOF> > statePoints = new List <List <TOF> >(numStates);
                for (int i = 0; i < numStates; i++)
                {
                    statePoints.Add(new List <TOF>(blockLength / numStates));
                }
                for (int i = 0; i < blockLength; i++)
                {
                    statePoints[(int)switchStates[i]].Add(tofDetectorData[b.detectors[detectorIndex]][i]);
                }
                int subLength = blockLength / numStates;

                // For each analysis channel, calculate the mean and standard error, then add to ChannelSet
                foreach (int channel in tofChannelsToAnalyse)
                {
                    TOFAccumulator tofAccumulator = new TOFAccumulator();
                    for (int subIndex = 0; subIndex < subLength; subIndex++)
                    {
                        TOF onTOF  = new TOF();
                        TOF offTOF = new TOF();
                        for (int i = 0; i < numStates; i++)
                        {
                            if (stateSigns[channel, i] == 1)
                            {
                                onTOF += statePoints[i][subIndex];
                            }
                            else
                            {
                                offTOF += statePoints[i][subIndex];
                            }
                        }
                        onTOF  /= numStates;
                        offTOF /= numStates;
                        tofAccumulator.Add(onTOF - offTOF);
                    }

                    // add the channel to the ChannelSet
                    List <string> usedSwitches = new List <string>();
                    for (int i = 0; i < modulations.Count; i++)
                    {
                        if ((channel & (1 << i)) != 0)
                        {
                            usedSwitches.Add(modulations[i].Name);
                        }
                    }
                    string[] channelName = usedSwitches.ToArray();
                    // the SIG channel has a special name
                    if (channel == 0)
                    {
                        channelName = new string[] { "SIG" };
                    }
                    channelSet.AddChannel(channelName, tofAccumulator.GetResult());
                }

                // If the detector is a molecule detector, add the special channels
                if (MOLECULE_DETECTORS.Contains(d))
                {
                    channelSet = AppendChannelSetWithSpecialValues(channelSet);
                }

                // Add the ChannelSet to the demodulated block
                db.AddDetector(d, calibration, channelSet);
            }

            return(db);
        }
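In both detector loops above, the analysis-channel index acts as a bitmask over the block's modulations: bit i selects modulation i, and channel 0 gets the special name SIG. A small sketch of that naming convention, with invented modulation names:

using System;
using System.Collections.Generic;

string[] modulationNames = { "E", "B", "RF" };   // hypothetical names; 2^3 = 8 channels
int channel = 5;                                 // binary 101 -> modulations 0 and 2

var usedSwitches = new List<string>();
for (int i = 0; i < modulationNames.Length; i++)
{
    if ((channel & (1 << i)) != 0)
    {
        usedSwitches.Add(modulationNames[i]);
    }
}

// usedSwitches is { "E", "RF" }; channel 0 would instead be named { "SIG" }.
Console.WriteLine(string.Join(",", usedSwitches));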
Example #7
        public static void Run()
        {
            RILogManager.Default?.SendDebug("MNIST Data Loading...");
            MnistData mnistData = new MnistData(28);

            RILogManager.Default?.SendDebug("Training Start...");

            int           neuronCount = 28;
            FunctionStack nn          = new FunctionStack("Test19",
                                                          new Linear(true, neuronCount * neuronCount, N, name: "l1 Linear"), // L1
                                                          new BatchNormalization(true, N, name: "l1 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l1 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l2 Linear"), // L2
                                                          new BatchNormalization(true, N, name: "l2 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l2 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l3 Linear"), // L3
                                                          new BatchNormalization(true, N, name: "l3 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l3 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l4 Linear"), // L4
                                                          new BatchNormalization(true, N, name: "l4 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l4 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l5 Linear"), // L5
                                                          new BatchNormalization(true, N, name: "l5 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l5 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l6 Linear"), // L6
                                                          new BatchNormalization(true, N, name: "l6 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l6 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l7 Linear"), // L7
                                                          new BatchNormalization(true, N, name: "l7 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l7 ReLU"),
                                                          new Linear(true, N, N, name: "l8 Linear"), // L8
                                                          new BatchNormalization(true, N, name: "l8 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l8 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l9 Linear"), // L9
                                                          new BatchNormalization(true, N, name: "l9 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l9 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l10 Linear"), // L10
                                                          new BatchNormalization(true, N, name: "l10 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l10 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l11 Linear"), // L11
                                                          new BatchNormalization(true, N, name: "l11 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l11 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l12 Linear"), // L12
                                                          new BatchNormalization(true, N, name: "l12 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l12 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l13 Linear"), // L13
                                                          new BatchNormalization(true, N, name: "l13 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l13 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l14 Linear"), // L14
                                                          new BatchNormalization(true, N, name: "l14 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l14 PolynomialApproximantSteep"),
                                                          new Linear(true, N, 10, name: "l15 Linear") // L15
                                                          );


            nn.SetOptimizer(new AdaGrad());
            //nn.SetOptimizer(new Adam());

            RunningStatistics stats             = new RunningStatistics();
            Histogram         lossHistogram     = new Histogram();
            Histogram         accuracyHistogram = new Histogram();
            Real totalLoss        = 0;
            long totalLossCounter = 0;
            Real highestAccuracy  = 0;
            Real bestLocalLoss    = 0;
            Real bestTotalLoss    = 0;

            // First skeleton save
            ModelIO.Save(nn, nn.Name);

            for (int epoch = 0; epoch < 1; epoch++)
            {
                RILogManager.Default?.SendDebug("epoch " + (epoch + 1));
                RILogManager.Default?.ViewerSendWatch("epoch", (epoch + 1));

                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    RILogManager.Default?.SendInformation("batch count " + i + "/" + TRAIN_DATA_COUNT);

                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, 28, 28);

                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;



                    stats.Push(sumLoss);
                    lossHistogram.AddBucket(new Bucket(-10, 10));
                    accuracyHistogram.AddBucket(new Bucket(-10.0, 10));

                    if (sumLoss < bestLocalLoss && !double.IsNaN(sumLoss))
                    {
                        bestLocalLoss = sumLoss;
                    }
                    if (stats.Mean < bestTotalLoss && !double.IsNaN(sumLoss))
                    {
                        bestTotalLoss = stats.Mean;
                    }

                    try
                    {
                        lossHistogram.AddData(sumLoss);
                    }
                    catch (Exception)
                    {
                    }

                    if (i % 20 == 0)
                    {
                        RILogManager.Default?.SendDebug("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                        RILogManager.Default?.SendDebug("Total/Mean loss " + stats.Mean);
                        RILogManager.Default?.SendDebug("local loss " + sumLoss);

                        RILogManager.Default?.SendInformation("batch count " + i + "/" + TRAIN_DATA_COUNT);
                        RILogManager.Default?.ViewerSendWatch("batch count", i);
                        RILogManager.Default?.ViewerSendWatch("Total/Mean loss", stats.Mean);
                        RILogManager.Default?.ViewerSendWatch("local loss", sumLoss);


                        RILogManager.Default?.SendDebug("");

                        RILogManager.Default?.SendDebug("Testing...");
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);
                        Real        accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        if (accuracy > highestAccuracy)
                        {
                            highestAccuracy = accuracy;
                        }

                        RILogManager.Default?.SendDebug("Accuracy: " + accuracy);
                        RILogManager.Default?.ViewerSendWatch("Accuracy", accuracy);

                        try
                        {
                            accuracyHistogram.AddData(accuracy);
                        }
                        catch (Exception)
                        {
                        }
                    }
                }
            }

            RILogManager.Default?.SendDebug("Best Accuracy: " + highestAccuracy);
            RILogManager.Default?.SendDebug("Best Total Loss " + bestTotalLoss);
            RILogManager.Default?.SendDebug("Best Local Loss " + bestLocalLoss);

            RILogManager.Default?.ViewerSendWatch("Best Accuracy:", highestAccuracy);
            RILogManager.Default?.ViewerSendWatch("Best Total Loss", bestTotalLoss);
            RILogManager.Default?.ViewerSendWatch("Best Local Loss", bestLocalLoss);

            // Save all with training data
            ModelIO.Save(nn, nn.Name);
        }
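The fifteen layers above repeat the same Linear -> BatchNormalization -> activation pattern with hand-numbered names. As a refactoring sketch only, using constructors already shown in this example (whether FunctionStack will accept the resulting sequence in place of the inline argument list depends on its constructor signature, which is an assumption here), the repeated block could be generated by a helper:

// Yields one hidden block; width corresponds to N in the stack above.
// Requires System.Collections.Generic plus the library's Function, Linear,
// BatchNormalization and LeakyReLU types.
static IEnumerable<Function> HiddenBlock(int index, int width)
{
    yield return new Linear(true, width, width, name: $"l{index} Linear");
    yield return new BatchNormalization(true, width, name: $"l{index} BatchNorm");
    yield return new LeakyReLU(slope: 0.000001, name: $"l{index} LeakyReLU");
}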
Example #8
        public static void Run()
        {
            int neuronCount = 28;

            RILogManager.Default?.SendDebug("MNIST Data Loading...");
            MnistData mnistData = new MnistData(neuronCount);

            RILogManager.Default.SendInformation("Training Start, creating function stack.");

            SortedFunctionStack   nn        = new SortedFunctionStack();
            SortedList <Function> functions = new SortedList <Function>();

            ParallelOptions po = new ParallelOptions();

            po.MaxDegreeOfParallelism = 4;

            for (int x = 0; x < numLayers; x++)
            {
                Application.DoEvents();

                functions.Add(new Linear(true, neuronCount * neuronCount, N, name: $"l{x} Linear"));
                functions.Add(new BatchNormalization(true, N, name: $"l{x} BatchNorm"));
                functions.Add(new ReLU(name: $"l{x} ReLU"));
                RILogManager.Default.ViewerSendWatch("Total Layers", (x + 1));
            }

            RILogManager.Default.SendInformation("Adding Output Layer");
            Application.DoEvents();
            nn.Add(new Linear(true, N, 10, noBias: false, name: $"l{numLayers + 1} Linear"));
            RILogManager.Default.ViewerSendWatch("Total Layers", numLayers);


            RILogManager.Default.SendInformation("Setting Optimizer to AdaGrad");
            nn.SetOptimizer(new AdaGrad());
            Application.DoEvents();

            RunningStatistics stats             = new RunningStatistics();
            Histogram         lossHistogram     = new Histogram();
            Histogram         accuracyHistogram = new Histogram();
            Real totalLoss        = 0;
            long totalLossCounter = 0;
            Real highestAccuracy  = 0;
            Real bestLocalLoss    = 0;
            Real bestTotalLoss    = 0;

            for (int epoch = 0; epoch < 3; epoch++)
            {
                RILogManager.Default?.SendDebug("epoch " + (epoch + 1));
                RILogManager.Default.SendInformation("epoch " + (epoch + 1));
                RILogManager.Default.ViewerSendWatch("epoch", (epoch + 1));
                Application.DoEvents();

                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    Application.DoEvents();

                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, neuronCount, neuronCount);

                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;

                    stats.Push(sumLoss);
                    lossHistogram.AddBucket(new Bucket(-10, 10));
                    accuracyHistogram.AddBucket(new Bucket(-10.0, 10));

                    if (sumLoss < bestLocalLoss && !double.IsNaN(sumLoss))
                    {
                        bestLocalLoss = sumLoss;
                    }
                    if (stats.Mean < bestTotalLoss && !double.IsNaN(sumLoss))
                    {
                        bestTotalLoss = stats.Mean;
                    }

                    try
                    {
                        lossHistogram.AddData(sumLoss);
                    }
                    catch (Exception)
                    {
                    }

                    if (i % 20 == 0)
                    {
                        RILogManager.Default.ViewerSendWatch("Batch Count ", i);
                        RILogManager.Default.ViewerSendWatch("Total/Mean loss", stats.Mean);
                        RILogManager.Default.ViewerSendWatch("Local loss", sumLoss);
                        RILogManager.Default.SendInformation("Batch Count " + i + "/" + TRAIN_DATA_COUNT + ", epoch " + epoch + 1);
                        RILogManager.Default.SendInformation("Total/Mean loss " + stats.Mean);
                        RILogManager.Default.SendInformation("Local loss " + sumLoss);
                        Application.DoEvents();


                        RILogManager.Default?.SendDebug("Testing...");

                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);
                        Real        accuracy = Trainer.Accuracy(nn, datasetY?.Data, datasetY.Label);
                        if (accuracy > highestAccuracy)
                        {
                            highestAccuracy = accuracy;
                        }

                        RILogManager.Default?.SendDebug("Accuracy: " + accuracy);

                        RILogManager.Default.ViewerSendWatch("Best Accuracy: ", highestAccuracy);
                        RILogManager.Default.ViewerSendWatch("Best Total Loss ", bestTotalLoss);
                        RILogManager.Default.ViewerSendWatch("Best Local Loss ", bestLocalLoss);
                        Application.DoEvents();

                        try
                        {
                            accuracyHistogram.AddData(accuracy);
                        }
                        catch (Exception)
                        {
                        }
                    }
                }
            }

            ModelIO.Save(nn, Application.StartupPath + "\\test20.nn");
            RILogManager.Default?.SendDebug("Best Accuracy: " + highestAccuracy);
            RILogManager.Default?.SendDebug("Best Total Loss " + bestTotalLoss);
            RILogManager.Default?.SendDebug("Best Local Loss " + bestLocalLoss);
            RILogManager.Default.ViewerSendWatch("Best Accuracy: ", highestAccuracy);
            RILogManager.Default.ViewerSendWatch("Best Total Loss ", bestTotalLoss);
            RILogManager.Default.ViewerSendWatch("Best Local Loss ", bestLocalLoss);
        }