Example #1
        public async Task TestNormalDistribution01()
        {
            const float MEAN = 0.5f;
            const float STANDARD_DEVIATION = 0.2f;

            using var rng = new MultiThreadedRng();
            var dist  = new FastRng.Float.Distributions.NormalS02M05(rng);
            var stats = new RunningStatistics();
            var fra   = new FrequencyAnalysis();

            for (var n = 0; n < 100_000; n++)
            {
                var nextNumber = await dist.NextNumber();

                stats.Push(nextNumber);
                fra.CountThis(nextNumber);
            }

            fra.NormalizeAndPlotEvents(TestContext.WriteLine);

            TestContext.WriteLine($"mean={MEAN} vs. {stats.Mean}");
            TestContext.WriteLine($"variance={STANDARD_DEVIATION * STANDARD_DEVIATION} vs {stats.Variance}");

            Assert.That(stats.Mean, Is.EqualTo(MEAN).Within(0.01f), "Mean is out of range");
            Assert.That(stats.Variance, Is.EqualTo(STANDARD_DEVIATION * STANDARD_DEVIATION).Within(0.01f), "Variance is out of range");
        }
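The tests above rely on RunningStatistics.Push accumulating moments in a single pass. A minimal sketch of what such an accumulator does for the first two moments (a Welford-style update; this is an illustration, not Math.NET's actual implementation, which also tracks the moments behind Skewness and Kurtosis):

        // Minimal sketch of a Welford-style single-pass accumulator, assuming only
        // the Push/Count/Mean/Variance surface used in these examples.
        public sealed class RunningMeanVariance
        {
            private long   _n;
            private double _mean;
            private double _m2; // running sum of squared deviations from the mean

            public void Push(double value)
            {
                _n++;
                double delta = value - _mean;
                _mean += delta / _n;               // incremental mean update
                _m2   += delta * (value - _mean);  // uses both old and new mean
            }

            public long   Count    => _n;
            public double Mean     => _n > 0 ? _mean : double.NaN;
            public double Variance => _n > 1 ? _m2 / (_n - 1) : double.NaN; // sample variance
        }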
Example #2
 /// Clears all data from this series and resets statistics.
 public void Clear()
 {
     InvalidateRenderCache = true;
     _ptList.Clear();
     XStats = new RunningStatistics();
     YStats = new RunningStatistics();
 }
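Example #3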
        public void ShortSequences()
        {
            var stats0 = new RunningStatistics(new double[0]);

            Assert.That(stats0.Skewness, Is.NaN);
            Assert.That(stats0.Kurtosis, Is.NaN);

            var stats1 = new RunningStatistics(new[] { 1.0 });

            Assert.That(stats1.Skewness, Is.NaN);
            Assert.That(stats1.Kurtosis, Is.NaN);

            var stats2 = new RunningStatistics(new[] { 1.0, 2.0 });

            Assert.That(stats2.Skewness, Is.NaN);
            Assert.That(stats2.Kurtosis, Is.NaN);

            var stats3 = new RunningStatistics(new[] { 1.0, 2.0, -3.0 });

            Assert.That(stats3.Skewness, Is.Not.NaN);
            Assert.That(stats3.Kurtosis, Is.NaN);

            var stats4 = new RunningStatistics(new[] { 1.0, 2.0, -3.0, -4.0 });

            Assert.That(stats4.Skewness, Is.Not.NaN);
            Assert.That(stats4.Kurtosis, Is.Not.NaN);
        }
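Example #4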
        public void ZeroVarianceSequence()
        {
            var stats = new RunningStatistics(new[] { 2.0, 2.0, 2.0, 2.0 });

            Assert.That(stats.Skewness, Is.NaN);
            Assert.That(stats.Kurtosis, Is.NaN);
        }
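Skewness and kurtosis normalize the third and fourth central moments by powers of the standard deviation, so a constant sequence (zero variance) would divide by zero; NaN is the sensible answer. A sketch of the guard involved, with illustrative names rather than Math.NET's internals:

        // Illustrative guard: m2/m3 are running sums of squared/cubed deviations.
        static double SampleSkewness(long n, double m2, double m3)
        {
            if (n < 3 || m2 == 0.0)
            {
                return double.NaN; // too few points, or zero variance
            }
            double s2 = m2 / (n - 1); // sample variance
            return (n * m3) / ((n - 1) * (n - 2) * s2 * Math.Sqrt(s2));
        }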
Example #5
        public void Store()
        {
            if (!Setup)
            {
                //Setup Mean and Std
                Statistics = new RunningStatistics[Values.Length];
                for (int i = 0; i < Statistics.Length; i++)
                {
                    Statistics[i] = new RunningStatistics();
                }

                //Write Labels
                for (int i = 0; i < Names.Length; i++)
                {
                    Labels.WriteLine("[" + i + "]" + " " + Names[i]);
                }
                Labels.Close();

                Setup = true;
            }

            //Enqueue Sample
            float[] item = (float[])Values.Clone();
            lock (Buffer) {
                Buffer.Enqueue(item);
            }

            //Reset Running Index
            Dim = 0;
        }
Example #6
        public async Task TestUniformDistribution01()
        {
            const float A        = 0.0f;
            const float B        = 1.0f;
            const float MEAN     = 0.5f * (A + B);
            const float VARIANCE = (1.0f / 12.0f) * (B - A) * (B - A);

            using var rng = new MultiThreadedRng();
            var stats = new RunningStatistics();
            var fra   = new FrequencyAnalysis();

            for (var n = 0; n < 100_000; n++)
            {
                var value = await rng.GetUniform();

                stats.Push(value);
                fra.CountThis(value);
            }

            fra.NormalizeAndPlotEvents(TestContext.WriteLine);
            fra.PlotOccurence(TestContext.WriteLine);
            TestContext.WriteLine($"mean={MEAN} vs. {stats.Mean}");
            TestContext.WriteLine($"variance={VARIANCE} vs {stats.Variance}");

            Assert.That(stats.Mean, Is.EqualTo(MEAN).Within(0.01f), "Mean is out of range");
            Assert.That(stats.Variance, Is.EqualTo(VARIANCE).Within(0.001f), "Variance is out of range");
        }
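For a uniform distribution on [A, B] the theoretical moments are mean = (A + B)/2 and variance = (B − A)²/12, so with A = 0 and B = 1 the assertions above compare against 0.5 and 1/12 ≈ 0.0833.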
Example #7
        public void Store()
        {
            if (Norm != null)
            {
                if (Mean == null && Std == null)
                {
                    Mean = new RunningStatistics[Values.Length];
                    for (int i = 0; i < Mean.Length; i++)
                    {
                        Mean[i] = new RunningStatistics();
                    }
                    Std = new RunningStatistics[Values.Length];
                    for (int i = 0; i < Std.Length; i++)
                    {
                        Std[i] = new RunningStatistics();
                    }
                }
                for (int i = 0; i < Values.Length; i++)
                {
                    switch (Types[i])
                    {
                    case ID.Standard:                                           //Ground Truth
                        Mean[i].Add(Values[i]);
                        Std[i].Add(Values[i]);
                        break;

                    case ID.Ignore:                                             //Mean 0.0 Std 1.0
                        Mean[i].Add(0f);
                        Std[i].Add(-1f);
                        Std[i].Add(1f);
                        break;

                    case ID.IgnoreMean:                                         //Mean 0.0 Std GT
                        Mean[i].Add(0f);
                        Std[i].Add(Values[i]);
                        break;

                    case ID.IgnoreStd:                                          //Mean GT Std 1.0
                        Mean[i].Add(Values[i]);
                        Std[i].Add(-1f);
                        Std[i].Add(1f);
                        break;
                    }
                }
            }

            if (File != null)
            {
                string line = string.Empty;
                for (int i = 0; i < Values.Length; i++)
                {
                    line += Values[i].ToString(Accuracy) + Separator;
                }
                line = line.Remove(line.Length - 1);
                line = line.Replace(",", ".");
                File.WriteLine(line);
            }

            Dim = 0;
        }
Example #8
        private static void UniformDistributionTest(double[] sampleArr, double lowerBound, double upperBound)
        {
            Array.Sort(sampleArr);
            RunningStatistics runningStats = new RunningStatistics(sampleArr);

            // Skewness should be pretty close to zero (evenly distributed samples)
            if(Math.Abs(runningStats.Skewness) > 0.01) Assert.Fail();
            
            // Mean test.
            double range = upperBound - lowerBound;
            double expectedMean = lowerBound + (range / 2.0);
            double meanErr = expectedMean - runningStats.Mean;
            double maxExpectedErr = range / 1000.0;

            if(Math.Abs(meanErr) > maxExpectedErr) Assert.Fail();

            // Test a range of centile/quantile values.
            double tauStep = (upperBound - lowerBound) / 10.0;

            for(double tau=0; tau <= 1.0; tau += 0.1)
            {
                double quantile = SortedArrayStatistics.Quantile(sampleArr, tau);
                double expectedQuantile = lowerBound + (tau * range);
                double quantileError = expectedQuantile - quantile;
                if(Math.Abs(quantileError) > maxExpectedErr) Assert.Fail();
            }
        }
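A hypothetical caller, just to show the intended shape of a call (the sampler, seed, and sample count are illustrative):

        public void SystemRandomIsRoughlyUniform()
        {
            var rng = new Random(42);
            double[] samples = new double[100_000];
            for (int i = 0; i < samples.Length; i++)
            {
                samples[i] = rng.NextDouble(); // uniform on [0, 1)
            }
            UniformDistributionTest(samples, 0.0, 1.0);
        }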
Example #9
        public void SkewnessConsistentWithR_e1071(string dataSet, double delta, double skewnessType1, double skewnessType2)
        {
            var data  = _data[dataSet];
            var stats = new RunningStatistics(data.Data);

            Assert.That(stats.Skewness, Is.EqualTo(skewnessType2).Within(delta), "Skewness");
            Assert.That(stats.PopulationSkewness, Is.EqualTo(skewnessType1).Within(delta), "PopulationSkewness");
        }
Example #10
        public void KurtosisConsistentWithR_e1071(string dataSet, double kurtosisType1, double kurtosisType2)
        {
            var data  = _data[dataSet];
            var stats = new RunningStatistics(data.Data);

            Assert.That(stats.Kurtosis, Is.EqualTo(kurtosisType2).Within(1e-6), "Kurtosis");
            Assert.That(stats.PopulationKurtosis, Is.EqualTo(kurtosisType1).Within(1e-6), "PopulationKurtosis");
        }
Example #11
        void Start()
        {
            eStatus = EvaluationStatus.LOADING;
            PrintStatus();
            Global.App_datapath          = Application.dataPath;
            cr_options                   = new ContinuousResultOptions();
            cr_options.latencyFrameCount = 1;
            parameters                   = new configuartion_parameters_t(deviceType);
            knownThreshold               = Global.GetDatasetThreshold(deviceType);

            iteration_results = new Results();
            all_results       = new Results();
            user_results      = new Results();

            //
            // Load all data
            //
            participants         = Global.GetParticipantList(deviceType);
            participantsDataset  = new List <Dataset>();
            trainSets            = new List <List <Sample> >();
            participantsFrames   = new List <List <Frame> >();
            participantsCommands = new List <List <GestureCommand> >();
            jkTrainSets          = new List <List <Jackknife.Sample> >();

            long ts1 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // at beginning

            foreach (int external_PID in participants)
            {
                Dataset dataset = Global.load_subject_dataset(deviceType, external_PID);
                participantsDataset.Add(dataset);

                List <Frame> loadedFrames = new List <Frame>();
                Global.load_session(deviceType, external_PID, loadedFrames, dataset);
                participantsFrames.Add(loadedFrames);

                List <GestureCommand> commands = new List <GestureCommand>();
                GestureCommand.GetAllCommands(commands, dataset, deviceType, external_PID);
                participantsCommands.Add(commands);

                List <Sample> trainSet = Global.GetTrainSet(dataset, trainCount);
                trainSets.Add(trainSet);

                List <Jackknife.Sample> jkTrainSet = JackknifeConnector.GetJKTrainSet(trainSet);
                jkTrainSets.Add(jkTrainSet);
            }

            long ts2 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // after loading

            Debug.Log(string.Format("Loaded participants data. Time elapsed: {0}s", (ts2 - ts1) / 1000.0));

            iteration = 0;
            currentParticipantIndex = 0;
            currentParticipantID    = participants[currentParticipantIndex];
            eStatus = EvaluationStatus.TRAINING;

            timeStats_Overall       = new RunningStatistics();
            timeStats_UserDependent = new RunningStatistics();
        }
Example #12
        private bool DoSomeWork(Library library, PDFDocument pdf_document, RunningStatistics stats)
        {
            if (Utilities.Shutdownable.ShutdownableManager.Instance.IsShuttingDown)
            {
                Logging.Debug特("Breaking out of MetadataExtractionDaemon PDF processing loop due to daemon termination");
                return(false);
            }

            // Start rendering the first page so we can do some extraction
            try
            {
                //if (pdf_document.DocumentExists) -- already tested in collection loop above
                pdf_document.PDFRenderer.GetOCRText(1);
            }
            catch (Exception ex)
            {
                Logging.Error(ex, "There was an exception while requesting the first page to be OCRed while processing document {0}", pdf_document.Fingerprint);
            }

            StatusManager.Instance.UpdateStatus("AutoSuggestMetadata", "Suggesting metadata", stats.currentdocumentIndex, stats.totalDocumentCount, true);
            if (StatusManager.Instance.IsCancelled("AutoSuggestMetadata"))
            {
                return(false);
            }

            // Try get the authors and year with the PDF in-file metadata
            try
            {
                PDFMetadataInferenceFromPDFMetadata.InferFromPDFMetadata(pdf_document);
            }
            catch (Exception ex)
            {
                Logging.Warn(ex, "Problem in PDFMetadataInferenceFromPDFMetadata.InferFromPDFMetadata while processing document {0}", pdf_document.Fingerprint);
            }

            // Try looking for the title in the OCR
            try
            {
                PDFMetadataInferenceFromOCR.InferTitleFromOCR(pdf_document);
            }
            catch (Exception ex)
            {
                Logging.Warn(ex, "Problem in PDFMetadataInferenceFromOCR.InferTitleFromOCR while processing document {0}", pdf_document.Fingerprint);
            }

            // Try suggesting some bibtex from bibtexsearch.com
            try
            {
                PDFMetadataInferenceFromBibTeXSearch.InferBibTeX(pdf_document, false);
            }
            catch (Exception ex)
            {
                Logging.Warn(ex, "Problem in PDFMetadataInferenceFromOCR.InferTitleFromOCR while processing document {0}", pdf_document.Fingerprint);
            }

            return(true);
        }
Example #13
        public void Combine()
        {
            var rnd = new SystemRandomSource(10);
            var a   = Generate.Random(200, new Erlang(2, 0.2, rnd));
            var b   = Generate.Random(100, new Beta(1.2, 1.4, rnd));
            var c   = Generate.Random(150, new Rayleigh(0.8, rnd));

            var d = a.Concat(b).Concat(c).ToArray();

            var x = new RunningStatistics(d);

            var y = new RunningStatistics(a);

            y.PushRange(b);
            y.PushRange(c);

            var za = new RunningStatistics(a);
            var zb = new RunningStatistics(b);
            var zc = new RunningStatistics(c);
            var z  = za + zb + zc;

            Assert.That(x.Mean, Is.EqualTo(d.Mean()).Within(1e-12), "Mean Reference");
            Assert.That(y.Mean, Is.EqualTo(x.Mean).Within(1e-12), "Mean y");
            Assert.That(z.Mean, Is.EqualTo(x.Mean).Within(1e-12), "Mean z");

            Assert.That(x.Variance, Is.EqualTo(d.Variance()).Within(1e-12), "Variance Reference");
            Assert.That(y.Variance, Is.EqualTo(x.Variance).Within(1e-12), "Variance y");
            Assert.That(z.Variance, Is.EqualTo(x.Variance).Within(1e-12), "Variance z");

            Assert.That(x.PopulationVariance, Is.EqualTo(d.PopulationVariance()).Within(1e-12), "PopulationVariance Reference");
            Assert.That(y.PopulationVariance, Is.EqualTo(x.PopulationVariance).Within(1e-12), "PopulationVariance y");
            Assert.That(z.PopulationVariance, Is.EqualTo(x.PopulationVariance).Within(1e-12), "PopulationVariance z");

            Assert.That(x.StandardDeviation, Is.EqualTo(d.StandardDeviation()).Within(1e-12), "StandardDeviation Reference");
            Assert.That(y.StandardDeviation, Is.EqualTo(x.StandardDeviation).Within(1e-12), "StandardDeviation y");
            Assert.That(z.StandardDeviation, Is.EqualTo(x.StandardDeviation).Within(1e-12), "StandardDeviation z");

            Assert.That(x.PopulationStandardDeviation, Is.EqualTo(d.PopulationStandardDeviation()).Within(1e-12), "PopulationStandardDeviation Reference");
            Assert.That(y.PopulationStandardDeviation, Is.EqualTo(x.PopulationStandardDeviation).Within(1e-12), "PopulationStandardDeviation y");
            Assert.That(z.PopulationStandardDeviation, Is.EqualTo(x.PopulationStandardDeviation).Within(1e-12), "PopulationStandardDeviation z");

            Assert.That(x.Skewness, Is.EqualTo(d.Skewness()).Within(1e-12), "Skewness Reference (not independent!)");
            Assert.That(y.Skewness, Is.EqualTo(x.Skewness).Within(1e-12), "Skewness y");
            Assert.That(z.Skewness, Is.EqualTo(x.Skewness).Within(1e-12), "Skewness z");

            Assert.That(x.PopulationSkewness, Is.EqualTo(d.PopulationSkewness()).Within(1e-12), "PopulationSkewness Reference (not independent!)");
            Assert.That(y.PopulationSkewness, Is.EqualTo(x.PopulationSkewness).Within(1e-12), "PopulationSkewness y");
            Assert.That(z.PopulationSkewness, Is.EqualTo(x.PopulationSkewness).Within(1e-12), "PopulationSkewness z");

            Assert.That(x.Kurtosis, Is.EqualTo(d.Kurtosis()).Within(1e-12), "Kurtosis Reference (not independent!)");
            Assert.That(y.Kurtosis, Is.EqualTo(x.Kurtosis).Within(1e-12), "Kurtosis y");
            Assert.That(z.Kurtosis, Is.EqualTo(x.Kurtosis).Within(1e-12), "Kurtosis z");

            Assert.That(x.PopulationKurtosis, Is.EqualTo(d.PopulationKurtosis()).Within(1e-12), "PopulationKurtosis Reference (not independent!)");
            Assert.That(y.PopulationKurtosis, Is.EqualTo(x.PopulationKurtosis).Within(1e-12), "PopulationKurtosis y");
            Assert.That(z.PopulationKurtosis, Is.EqualTo(x.PopulationKurtosis).Within(1e-12), "PopulationKurtosis z");
        }
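The interesting part of this test is that y (PushRange) and z (operator +) agree with x, which saw every sample at once. Combining accumulators without revisiting samples follows the standard pairwise-merge formulas (Chan et al.); a sketch for count, mean, and second moment, with Math.NET's operator presumably extending the same idea to the higher moments:

        // Standard pairwise merge of two running accumulators (illustrative).
        static (long n, double mean, double m2) Merge(
            (long n, double mean, double m2) a,
            (long n, double mean, double m2) b)
        {
            long   n     = a.n + b.n;
            double delta = b.mean - a.mean;
            double mean  = a.mean + delta * b.n / n;
            double m2    = a.m2 + b.m2 + delta * delta * a.n * b.n / n;
            return (n, mean, m2);
        }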
Example #14
        public static ConfidenceInterval ConfidenceInterval(this RunningStatistics statistics,
                                                            double z)
        {
            var sd    = statistics.StandardDeviation;
            var mean  = statistics.Mean;
            var error = sd / Math.Sqrt(statistics.Count);

            return(new ConfidenceInterval(mean, z * error));
        }
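Usage sketch: for an approximately normal sampling distribution, z ≈ 1.96 gives a 95% interval (the data values are illustrative):

        var stats = new RunningStatistics(new[] { 9.8, 10.1, 10.0, 9.9, 10.2 });
        var ci95  = stats.ConfidenceInterval(1.96); // mean ± 1.96 · sd / √n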
Example #15
        public void ConsistentWithNist(string dataSet, int digits, double skewness, double kurtosis, double median, double min, double max, int count)
        {
            var data  = _data[dataSet];
            var stats = new RunningStatistics(data.Data);

            AssertHelpers.AlmostEqualRelative(data.Mean, stats.Mean, 10);
            AssertHelpers.AlmostEqualRelative(data.StandardDeviation, stats.StandardDeviation, digits);
            AssertHelpers.AlmostEqualRelative(skewness, stats.Skewness, 8);
            AssertHelpers.AlmostEqualRelative(kurtosis, stats.Kurtosis, 8);
            Assert.AreEqual(stats.Minimum, min);
            Assert.AreEqual(stats.Maximum, max);
            Assert.AreEqual(stats.Count, count);
        }
Example #16
        private void TrackMetric(string metricName, double value)
        {
            lock (_metrics)
            {
                if (!_metrics.TryGetValue(metricName, out RunningStatistics statistics))
                {
                    statistics = new RunningStatistics();
                    _metrics.Add(metricName, statistics);
                }

                statistics.Push(value);
            }
        }
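The lock covers both the dictionary lookup and the Push, so concurrent producers are safe. A hypothetical call site (metric name and values are illustrative):

        // e.g. from request-handling code running on many threads
        Parallel.For(0, 1_000, i => TrackMetric("request.duration.ms", i % 50));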
Example #17
        void logResultsToFile(string fname, Results results, RunningStatistics timeStats, string label)
        {
            double       f       = 1000.0;
            StreamWriter outfile = new StreamWriter(fname, true);

            outfile.Write((segmentorType == SegmentorType.MACHETE ? "MACHETE" : "WINDOW") + "," + label + "," +
                          trainCount + ",");
            outfile.Write(string.Format("{0:F4},{1:F4},{2:F4},{3:F4},{4:F4},{5:F4},{6:F4}", (timeStats.mean * f),
                                        (timeStats.minimum * f), (timeStats.maximum * f), (timeStats.std * f), (timeStats.variance * f),
                                        (timeStats.ci_lower() * f), (timeStats.ci_upper() * f)));
            outfile.Write((segmentorType == SegmentorType.MACHETE ? ",-1" : string.Format(", {0}", window.mode)));
            outfile.WriteLine();

            outfile.Close();
        }
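Example #18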
        public void RunningStatisticsWithInfinityNaNDataContractSerializationTest()
        {
            var expected = new RunningStatistics(new[] { 1.0, 2.0, 3.0, double.PositiveInfinity, double.NaN });

            var serializer = new DataContractSerializer(typeof(RunningStatistics));
            var stream     = new MemoryStream();

            serializer.WriteObject(stream, expected);

            stream.Position = 0;
            var actual = (RunningStatistics)serializer.ReadObject(stream);

            Assert.That(actual.Count, Is.EqualTo(expected.Count));
            Assert.That(actual.Maximum, Is.EqualTo(expected.Maximum));
            Assert.That(actual.Mean, Is.EqualTo(expected.Mean));
        }
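Example #19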
        public static void TestSimpleStats(ISampler <double> sampler)
        {
            const int sampleCount = 20_000_000;

            RunningStatistics runningStats = new RunningStatistics();

            for (int i = 0; i < sampleCount; i++)
            {
                runningStats.Push(sampler.Sample());
            }

            Assert.True(Math.Abs(runningStats.Mean) < 0.001);
            Assert.True(Math.Abs(runningStats.StandardDeviation - 1.0) < 0.0005);
            Assert.True(Math.Abs(runningStats.Skewness) < 0.01);
            Assert.True(Math.Abs(runningStats.Kurtosis) < 0.01);
        }
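A hypothetical ISampler&lt;double&gt; to exercise TestSimpleStats; only the Sample() member is assumed from the call above, and the Box–Muller transform stands in for whatever sampler the real suite tests:

        public sealed class GaussianSampler : ISampler<double>
        {
            private readonly Random _rng = new Random(1);

            public double Sample()
            {
                // Box–Muller: two uniforms -> one standard-normal deviate
                double u1 = 1.0 - _rng.NextDouble(); // avoid log(0)
                double u2 = _rng.NextDouble();
                return Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
            }
        }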
Example #20
        private void TrackMetricTelemetry(string metricName, RunningStatistics statistics)
        {
            var properties = new Dictionary <string, string>()
            {
                { "Count", statistics.Count.ToString(CultureInfo.InvariantCulture) },
                { "Mean", statistics.Mean.ToString(CultureInfo.InvariantCulture) },
                { "Max", statistics.Maximum.ToString(CultureInfo.InvariantCulture) },
                { "Min", statistics.Minimum.ToString(CultureInfo.InvariantCulture) },
            };

            if (statistics.Count >= 2)
            {
                properties.Add("StandardDeviation", statistics.StandardDeviation.ToString(CultureInfo.InvariantCulture));
            }

            TrackEvent(metricName, properties);
        }
Example #21
        public static void UniformDistributionTest(double[] sampleArr, double minValue, double maxValue)
        {
            Array.Sort(sampleArr);
            RunningStatistics runningStats = new RunningStatistics(sampleArr);

            // Skewness should be pretty close to zero (evenly distributed samples)
            if (Math.Abs(runningStats.Skewness) > 0.01)
            {
                Assert.Fail();
            }

            // Mean test.
            double range          = maxValue - minValue;
            double expectedMean   = minValue + (range / 2.0);
            double meanErr        = expectedMean - runningStats.Mean;
            double maxExpectedErr = range / 1000.0;

            if (Math.Abs(meanErr) > maxExpectedErr)
            {
                Assert.Fail();
            }

            // Test a range of centile/quantile values.
            double tauStep = (maxValue - minValue) / 10.0;

            for (double tau = 0; tau <= 1.0; tau += 0.1)
            {
                double quantile         = SortedArrayStatistics.Quantile(sampleArr, tau);
                double expectedQuantile = minValue + (tau * range);
                double quantileError    = expectedQuantile - quantile;
                if (Math.Abs(quantileError) > maxExpectedErr)
                {
                    Assert.Fail();
                }
            }

            // Test that no samples are outside the defined range.
            for (int i = 0; i < sampleArr.Length; i++)
            {
                Assert.IsTrue(sampleArr[i] >= minValue && sampleArr[i] < maxValue);
            }
        }
Example #22
        private MetricTelemetry CreateMetricTelemetry(string metricName, RunningStatistics statistics)
        {
            var telemetry = new MetricTelemetry()
            {
                Name  = metricName,
                Count = (int)statistics.Count,
                Value = statistics.Mean,
                Max   = statistics.Maximum,
                Min   = statistics.Minimum
            };

            if (statistics.Count >= 2)
            {
                telemetry.StandardDeviation = statistics.StandardDeviation;
            }

            SetCommonProperties(telemetry.Properties);

            return(telemetry);
        }
Example #23
        public void Flush()
        {
            if (!IsEnabled)
            {
                return;
            }

            lock (_metrics)
            {
                foreach (var metric in _metrics)
                {
                    RunningStatistics statistics = metric.Value;
                    if (statistics.Count > 0)
                    {
                        TrackMetricTelemetry(metric.Key, statistics);
                    }
                }

                _metrics.Clear();
            }
        }
Example #24
        public double[] GatedMeanAndUncertainty(double startTime, double endTime)
        {
            double[] mne = new double[2];

            double[] trimmedGates = TrimGates(startTime, endTime);
            if (trimmedGates == null)
            {
                return(null);
            }
            startTime = trimmedGates[0];
            endTime   = trimmedGates[1];

            int low  = (int)Math.Ceiling((startTime - gateStartTime) / clockPeriod);
            int high = (int)Math.Floor((endTime - gateStartTime) / clockPeriod);

            // check the range is sensible
            if (low < 0)
            {
                low = 0;
            }
            if (high > this.Length - 1)
            {
                high = this.Length - 1;
            }
            if (low > high)
            {
                return(null);
            }

            RunningStatistics stats = new RunningStatistics();

            for (int i = low; i <= high; i++)
            {
                stats.Push(tofData[i]);
            }

            mne[0] = stats.Mean;
            mne[1] = stats.StandardErrorOfSampleMean;
            return(mne);
        }
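The error term returned in mne[1] is the standard error of the mean, presumably the sample standard deviation divided by √n, so the uncertainty shrinks as more clock bins fall inside the gate.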
Example #25
        public void Flush()
        {
            if (!IsEnabled)
            {
                return;
            }

            lock (_metrics)
            {
                foreach (var metric in _metrics)
                {
                    RunningStatistics statistics = metric.Value;
                    if (statistics.Count > 0)
                    {
                        MetricTelemetry metrics = CreateMetricTelemetry(metric.Key, statistics);
                        HockeyClient.Current.TrackMetric(metrics);
                    }
                }

                _metrics.Clear();
            }

            HockeyClient.Current.Flush();
        }
Example #27
        public static void Run()
        {
            int neuronCount = 28;

            RILogManager.Default?.SendDebug("MNIST Data Loading...");
            MnistData mnistData = new MnistData(neuronCount);

            RILogManager.Default.SendInformation("Training Start, creating function stack.");

            SortedFunctionStack   nn        = new SortedFunctionStack();
            SortedList <Function> functions = new SortedList <Function>();

            ParallelOptions po = new ParallelOptions();

            po.MaxDegreeOfParallelism = 4;

            for (int x = 0; x < numLayers; x++)
            {
                Application.DoEvents();

                functions.Add(new Linear(true, neuronCount * neuronCount, N, name: $"l{x} Linear"));
                functions.Add(new BatchNormalization(true, N, name: $"l{x} BatchNorm"));
                functions.Add(new ReLU(name: $"l{x} ReLU"));
                RILogManager.Default.ViewerSendWatch("Total Layers", (x + 1));
            }

            RILogManager.Default.SendInformation("Adding Output Layer");
            Application.DoEvents();
            nn.Add(new Linear(true, N, 10, noBias: false, name: $"l{numLayers + 1} Linear"));
            RILogManager.Default.ViewerSendWatch("Total Layers", numLayers);


            RILogManager.Default.SendInformation("Setting Optimizer to AdaGrad");
            nn.SetOptimizer(new AdaGrad());
            Application.DoEvents();

            RunningStatistics stats             = new RunningStatistics();
            Histogram         lossHistogram     = new Histogram();
            Histogram         accuracyHistogram = new Histogram();
            Real totalLoss        = 0;
            long totalLossCounter = 0;
            Real highestAccuracy  = 0;
            Real bestLocalLoss    = 0;
            Real bestTotalLoss    = 0;

            for (int epoch = 0; epoch < 3; epoch++)
            {
                RILogManager.Default?.SendDebug("epoch " + (epoch + 1));
                RILogManager.Default.SendInformation("epoch " + (epoch + 1));
                RILogManager.Default.ViewerSendWatch("epoch", (epoch + 1));
                Application.DoEvents();

                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    Application.DoEvents();

                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, neuronCount, neuronCount);

                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;

                    stats.Push(sumLoss);
                    lossHistogram.AddBucket(new Bucket(-10, 10));
                    accuracyHistogram.AddBucket(new Bucket(-10.0, 10));

                    if (sumLoss < bestLocalLoss && !double.IsNaN(sumLoss))
                    {
                        bestLocalLoss = sumLoss;
                    }
                    if (stats.Mean < bestTotalLoss && !double.IsNaN(sumLoss))
                    {
                        bestTotalLoss = stats.Mean;
                    }

                    try
                    {
                        lossHistogram.AddData(sumLoss);
                    }
                    catch (Exception)
                    {
                    }

                    if (i % 20 == 0)
                    {
                        RILogManager.Default.ViewerSendWatch("Batch Count ", i);
                        RILogManager.Default.ViewerSendWatch("Total/Mean loss", stats.Mean);
                        RILogManager.Default.ViewerSendWatch("Local loss", sumLoss);
                        RILogManager.Default.SendInformation("Batch Count " + i + "/" + TRAIN_DATA_COUNT + ", epoch " + epoch + 1);
                        RILogManager.Default.SendInformation("Total/Mean loss " + stats.Mean);
                        RILogManager.Default.SendInformation("Local loss " + sumLoss);
                        Application.DoEvents();


                        RILogManager.Default?.SendDebug("Testing...");

                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);
                        Real        accuracy = Trainer.Accuracy(nn, datasetY?.Data, datasetY.Label);
                        if (accuracy > highestAccuracy)
                        {
                            highestAccuracy = accuracy;
                        }

                        RILogManager.Default?.SendDebug("Accuracy: " + accuracy);

                        RILogManager.Default.ViewerSendWatch("Best Accuracy: ", highestAccuracy);
                        RILogManager.Default.ViewerSendWatch("Best Total Loss ", bestTotalLoss);
                        RILogManager.Default.ViewerSendWatch("Best Local Loss ", bestLocalLoss);
                        Application.DoEvents();

                        try
                        {
                            accuracyHistogram.AddData(accuracy);
                        }
                        catch (Exception)
                        {
                        }
                    }
                }
            }

            ModelIO.Save(nn, Application.StartupPath + "\\test20.nn");
            RILogManager.Default?.SendDebug("Best Accuracy: " + highestAccuracy);
            RILogManager.Default?.SendDebug("Best Total Loss " + bestTotalLoss);
            RILogManager.Default?.SendDebug("Best Local Loss " + bestLocalLoss);
            RILogManager.Default.ViewerSendWatch("Best Accuracy: ", highestAccuracy);
            RILogManager.Default.ViewerSendWatch("Best Total Loss ", bestTotalLoss);
            RILogManager.Default.ViewerSendWatch("Best Local Loss ", bestLocalLoss);
        }
Example #28
        public void DoMaintenance(Library library, Action callback_after_some_work_done)
        {
            Stopwatch clk = Stopwatch.StartNew();

            Logging.Debug特("MetadataExtractionDaemon::DoMaintenance START");

            RunningStatistics stats = new RunningStatistics();

            // To recover from a search index fatal failure and re-indexing attempt for very large libraries,
            // we're better off processing a limited number of source files as we'll be able to see
            // *some* results more quickly and we'll have a working, though yet incomplete,
            // index in *reasonable time*.
            //
            // Reconstructing the entire index will take a *long* time. We grow the index and other meta
            // stores a bunch-of-files at a time and then repeat the entire maintenance process until
            // we'll be sure to have run out of files to process for sure...
            const int MAX_NUMBER_OF_PDF_FILES_TO_PROCESS = 30;
            const int MIN_NUMBER_OF_PDF_FILES_TO_PROCESS_PER_ITERATION = 10;
            const int MAX_SECONDS_PER_ITERATION = 10 * 60;
            long      clk_bound = clk.ElapsedMilliseconds + MAX_SECONDS_PER_ITERATION * 1000;

            try
            {
                // If this library is busy, skip it for now
                if (Library.IsBusyAddingPDFs || Library.IsBusyRegeneratingTags)
                {
                    Logging.Debug特("MetadataExtractionDaemon::DoMaintenance: Not daemon processing any library that is busy with adds...");
                    return;
                }

                if (Utilities.Shutdownable.ShutdownableManager.Instance.IsShuttingDown)
                {
                    Logging.Debug特("MetadataExtractionDaemon::DoMaintenance: Breaking out of outer processing loop due to application termination");
                    return;
                }

                if (Common.Configuration.ConfigurationManager.Instance.ConfigurationRecord.DisableAllBackgroundTasks)
                {
                    Logging.Debug特("MetadataExtractionDaemon::DoMaintenance: Breaking out of outer processing loop due to DisableAllBackgroundTasks");
                    return;
                }

                // Check that we have something to do
                List <PDFDocument> pdf_documents = library.PDFDocuments;
                stats.totalDocumentCount      = pdf_documents.Count;
                stats.currentdocumentIndex    = 0;
                stats.documentsProcessedCount = 0;
                foreach (PDFDocument pdf_document in pdf_documents)
                {
                    int needs_processing = 0;

                    stats.currentdocumentIndex++;

                    // there's nothing to infer from PDF when there's no PDF to process:
                    if (!pdf_document.DocumentExists)
                    {
                        continue;
                    }

                    if (PDFMetadataInferenceFromPDFMetadata.NeedsProcessing(pdf_document))
                    {
                        needs_processing |= 0x01;
                    }
                    if (PDFMetadataInferenceFromOCR.NeedsProcessing(pdf_document))
                    {
                        needs_processing |= 0x02;
                    }
                    if (PDFMetadataInferenceFromBibTeXSearch.NeedsProcessing(pdf_document))
                    {
                        needs_processing |= 0x04;
                    }

                    if (needs_processing != 0)
                    {
                        pdfs_retry_count.TallyOne(pdf_document.Fingerprint);
                        int cnt = pdfs_retry_count.GetCount(pdf_document.Fingerprint);
                        if (!General.IsPowerOfTwo(cnt))
                        {
                            needs_processing = 0;  // skip this time around
                        }
#if true
                        // Reset counter when it has run up to 64 (which means 6 attempts were made up to now).
                        if (cnt > 64)
                        {
                            pdfs_retry_count.ResetTally(pdf_document.Fingerprint);
                        }
#endif
                    }

                    // Previous check calls MAY take some serious time, hence we SHOULD check again whether
                    // the user decided to exit Qiqqa before we go on and do more time consuming work.
                    if (Utilities.Shutdownable.ShutdownableManager.Instance.IsShuttingDown)
                    {
                        Logging.Debug特("Breaking out of MetadataExtractionDaemon PDF fingerprinting loop due to daemon termination");
                        return;
                    }

                    if (needs_processing != 0)
                    {
                        if (DoSomeWork(library, pdf_document, stats))
                        {
                            stats.documentsProcessedCount++;
                        }
                    }

                    // Limit the number of source files to process before we go and create/update
                    // a sane (though tiny and incomplete) Lucene search index database so that
                    // we have some up-to-date results ready whenever the user exits the Qiqqa application
                    // while this process is still running.
                    // When the user keeps Qiqqa running, this same approach will help us to 'update'
                    // the search index a bunch of files at a time, so everyone involved will be able
                    // to see progress happening after losing the index due to some fatal crash or
                    // forced re-index request.
                    if ((stats.documentsProcessedCount + 1) % MAX_NUMBER_OF_PDF_FILES_TO_PROCESS == 0)
                    {
                        Logging.Debug特("Interupting the MetadataExtractionDaemon PDF fingerprinting loop due to MAX_NUMBER_OF_PDF_FILES_TO_PROCESS reached");

                        callback_after_some_work_done();
                    }

                    // A timeout should only kick in when we have *some* work done already or
                    // we would have introduced a subtle bug for very large libraries: if the timeout
                    // is short enough for the library scan to take that long on a slow machine,
                    // the timeout would, by itself, cause no work to be done, *ever*.
                    // Hence we require a minimum amount of work done before the timeout condition
                    // is allowed to fire.
                    if (clk_bound <= clk.ElapsedMilliseconds && stats.documentsProcessedCount >= MIN_NUMBER_OF_PDF_FILES_TO_PROCESS_PER_ITERATION)
                    {
                        Logging.Debug特("Breaking out of MetadataExtractionDaemon PDF fingerprinting loop due to MAX_SECONDS_PER_ITERATION: {0} ms consumed", clk.ElapsedMilliseconds);
                        return;
                    }
                }
            }
            finally
            {
                if (0 < stats.documentsProcessedCount)
                {
                    Logging.Debug特("Got {0} items of metadata extraction work done.", stats.documentsProcessedCount);
                }
                else
                {
                    // nothing to do.
                    Logging.Debug特("MetadataExtractionDaemon::DoMaintenance: Breaking out of outer processing loop due to no more files to process right now.");

                    // when there's nothing to do, reset the retry tallying by doing a hard reset:
                    // the idea here being that delaying any retries on pending items is useless when
                    // there's nothing to do otherwise.
                    pdfs_retry_count = new CountingDictionary <string>();   // quickest and cleanest reset is a re-init (+ GarbageCollect of the old dict)
                }

                Logging.Info("{0}ms were spent to extract metadata", clk.ElapsedMilliseconds);
                StatusManager.Instance.ClearStatus("AutoSuggestMetadata");

                callback_after_some_work_done();
            }
        }
Example #29
        private static Dictionary<string, Dictionary<string, TestResult>> SummarizeTestResults(IEnumerable<TestIterationResult> allIterations)
        {
            var testResults = new Dictionary<string, Dictionary<string, TestResult>>();

            foreach (var iteration in allIterations)
            {
                Dictionary<string, TestResult> runResults;
                if (!testResults.TryGetValue(iteration.RunId, out runResults))
                    testResults[iteration.RunId] = runResults = new Dictionary<string, TestResult>();

                TestResult result;
                if (!runResults.TryGetValue(iteration.TestName, out result))
                {
                    runResults[iteration.TestName] = result = new TestResult();
                    result.RunId = iteration.RunId;
                    result.TestName = iteration.TestName;
                }

                foreach (var metric in iteration.MetricValues)
                {
                    RunningStatistics stats;
                    if (!result.Stats.TryGetValue(metric.Key, out stats))
                        result.Stats[metric.Key] = stats = new RunningStatistics();
                    stats.Push(metric.Value);
                }

                result.Iterations.Add(iteration);
            }

            return testResults;
        }
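A consumption sketch for the summary (the TestResult/Stats shapes are taken from the method above; deconstructing KeyValuePair needs C# 7.3 or later):

        foreach (var (runId, tests) in SummarizeTestResults(allIterations))
        {
            foreach (var (testName, result) in tests)
            {
                foreach (var (metric, stats) in result.Stats)
                {
                    Console.WriteLine($"{runId}/{testName}/{metric}: mean={stats.Mean:F3} (n={stats.Count})");
                }
            }
        }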
Example #33
        public DemodulatedBlock DemodulateBlock(Block b, DemodulationConfig demodulationConfig, int[] tofChannelsToAnalyse)
        {
            if (!b.detectors.Contains("asymmetry"))
            {
                b.AddDetectorsToBlock();
            }

            int blockLength = b.Points.Count;

            DemodulatedBlock db = new DemodulatedBlock(b.TimeStamp, b.Config, demodulationConfig);

            Dictionary <string, double[]> pointDetectorData = new Dictionary <string, double[]>();

            foreach (string d in demodulationConfig.GatedDetectors)
            {
                pointDetectorData.Add(d, GetGatedDetectorData(b, d, demodulationConfig.Gates.GetGate(d)));
            }
            foreach (string d in demodulationConfig.PointDetectors)
            {
                pointDetectorData.Add(d, GetPointDetectorData(b, d));
            }

            Dictionary <string, TOF[]> tofDetectorData = new Dictionary <string, TOF[]>();

            foreach (string d in demodulationConfig.TOFDetectors)
            {
                tofDetectorData.Add(d, GetTOFDetectorData(b, d));
            }

            // ----Demodulate channels----
            // --Build list of modulations--
            List <Modulation> modulations = GetModulations(b);

            // --Work out switch state for each point--
            List <uint> switchStates = GetSwitchStates(modulations);

            // --Calculate state signs for each analysis channel--
            // The first index selects the analysis channel, the second the switchState
            int numStates = (int)Math.Pow(2, modulations.Count);

            int[,] stateSigns = GetStateSigns(numStates);

            // --This is done for each point/gated detector--
            foreach (string d in pointDetectorData.Keys)
            {
                int detectorIndex = b.detectors.IndexOf(d);

                // We obtain one Channel Set for each detector
                ChannelSet <PointWithError> channelSet = new ChannelSet <PointWithError>();

                // Detector calibration
                double calibration = ((TOF)((EDMPoint)b.Points[0]).Shot.TOFs[detectorIndex]).Calibration;

                // Divide points into bins depending on switch state
                List <List <double> > statePoints = new List <List <double> >(numStates);
                for (int i = 0; i < numStates; i++)
                {
                    statePoints.Add(new List <double>(blockLength / numStates));
                }
                for (int i = 0; i < blockLength; i++)
                {
                    statePoints[(int)switchStates[i]].Add(pointDetectorData[b.detectors[detectorIndex]][i]);
                }
                int subLength = blockLength / numStates;

                // For each analysis channel, calculate the mean and standard error, then add to ChannelSet
                for (int channel = 0; channel < numStates; channel++)
                {
                    RunningStatistics stats = new RunningStatistics();
                    for (int subIndex = 0; subIndex < subLength; subIndex++)
                    {
                        double onVal  = 0.0;
                        double offVal = 0.0;
                        for (int i = 0; i < numStates; i++)
                        {
                            if (stateSigns[channel, i] == 1)
                            {
                                onVal += statePoints[i][subIndex];
                            }
                            else
                            {
                                offVal += statePoints[i][subIndex];
                            }
                        }
                        onVal  /= numStates;
                        offVal /= numStates;
                        stats.Push(onVal - offVal);
                    }

                    PointWithError pointWithError = new PointWithError()
                    {
                        Value = stats.Mean, Error = stats.StandardErrorOfSampleMean
                    };

                    // add the channel to the ChannelSet
                    List <string> usedSwitches = new List <string>();
                    for (int i = 0; i < modulations.Count; i++)
                    {
                        if ((channel & (1 << i)) != 0)
                        {
                            usedSwitches.Add(modulations[i].Name);
                        }
                    }
                    string[] channelName = usedSwitches.ToArray();
                    // the SIG channel has a special name
                    if (channel == 0)
                    {
                        channelName = new string[] { "SIG" };
                    }
                    channelSet.AddChannel(channelName, pointWithError);
                }

                // Add the ChannelSet to the demodulated block
                db.AddDetector(b.detectors[detectorIndex], calibration, channelSet);
            }

            // --This is done for each TOF detector--
            foreach (string d in tofDetectorData.Keys)
            {
                int detectorIndex = b.detectors.IndexOf(d);

                // We obtain one Channel Set for each detector
                ChannelSet <TOFWithError> channelSet = new ChannelSet <TOFWithError>();

                // Detector calibration
                double calibration = ((TOF)((EDMPoint)b.Points[0]).Shot.TOFs[detectorIndex]).Calibration;

                // Divide TOFs into bins depending on switch state
                List <List <TOF> > statePoints = new List <List <TOF> >(numStates);
                for (int i = 0; i < numStates; i++)
                {
                    statePoints.Add(new List <TOF>(blockLength / numStates));
                }
                for (int i = 0; i < blockLength; i++)
                {
                    statePoints[(int)switchStates[i]].Add(tofDetectorData[b.detectors[detectorIndex]][i]);
                }
                int subLength = blockLength / numStates;

                // For each analysis channel, calculate the mean and standard error, then add to ChannelSet
                foreach (int channel in tofChannelsToAnalyse)
                {
                    TOFAccumulator tofAccumulator = new TOFAccumulator();
                    for (int subIndex = 0; subIndex < subLength; subIndex++)
                    {
                        TOF onTOF  = new TOF();
                        TOF offTOF = new TOF();
                        for (int i = 0; i < numStates; i++)
                        {
                            if (stateSigns[channel, i] == 1)
                            {
                                onTOF += statePoints[i][subIndex];
                            }
                            else
                            {
                                offTOF += statePoints[i][subIndex];
                            }
                        }
                        onTOF  /= numStates;
                        offTOF /= numStates;
                        tofAccumulator.Add(onTOF - offTOF);
                    }

                    // add the channel to the ChannelSet
                    List <string> usedSwitches = new List <string>();
                    for (int i = 0; i < modulations.Count; i++)
                    {
                        if ((channel & (1 << i)) != 0)
                        {
                            usedSwitches.Add(modulations[i].Name);
                        }
                    }
                    string[] channelName = usedSwitches.ToArray();
                    // the SIG channel has a special name
                    if (channel == 0)
                    {
                        channelName = new string[] { "SIG" };
                    }
                    channelSet.AddChannel(channelName, tofAccumulator.GetResult());
                }

                // If the detector is a molecule detector, add the special channels
                if (MOLECULE_DETECTORS.Contains(d))
                {
                    channelSet = AppendChannelSetWithSpecialValues(channelSet);
                }

                // Add the ChannelSet to the demodulated block
                db.AddDetector(d, calibration, channelSet);
            }

            return(db);
        }
Ejemplo n.º 35
0
        public int OnExecute(CommandLineApplication app)
        {
            if (!string.IsNullOrEmpty(this.Connect))
            {
                var uri = new Uri(this.Connect);
                switch (uri.Scheme)
                {
                case "postgresql":
                    string userName = "";
                    string password = "";
                    if (!string.IsNullOrEmpty(uri.UserInfo))
                    {
                        var ss = uri.UserInfo.Split(":");
                        userName = ss[0];
                        password = ss.Length > 1 ? ss[1] : "";
                    }

                    if (!string.IsNullOrEmpty(this.UserName))
                    {
                        userName = this.UserName;
                    }
                    if (!string.IsNullOrEmpty(this.Password))
                    {
                        password = this.Password;
                    }

                    var port          = uri.Port > 0 ? uri.Port : 5432;
                    var database      = uri.AbsolutePath.Trim('/');
                    var connectString = $"Host={uri.Host};Port={port};Username={userName};Password={password};Database={database}";


                    var sql   = "";
                    int index = 0;
                    if (this.TableName.StartsWith("select", StringComparison.InvariantCultureIgnoreCase))
                    {
                        sql = this.TableName;
                    }
                    else
                    {
                        if (this.Index == null || int.TryParse(this.Index, out index))
                        {
                            sql = $"select * from {this.TableName}";
                        }
                        else
                        {
                            sql = $"select {this.Index} from {this.TableName}";
                        }
                    }


                    var dataSource = new DbDataSource(Npgsql.NpgsqlFactory.Instance, connectString);
                    var data       = dataSource.GetData(sql, index);

                    var stats = new RunningStatistics(data);
                    Console.WriteLine($"Count: {stats.Count}");
                    Console.WriteLine($"Maximum: {stats.Maximum}");
                    Console.WriteLine($"Minimum: {stats.Minimum}");
                    Console.WriteLine($"Mean: {stats.Mean}");
                    Console.WriteLine($"Standard deviation: {stats.StandardDeviation}");
                    Console.WriteLine($"{stats}");
                    Console.WriteLine($"Median: {data.Median()}");
                    Console.WriteLine(@"{0} - population (biased) variance", data.PopulationVariance().ToString(" #0.00000;-#0.00000"));
                    Console.WriteLine(@"{0} - sample (unbiased) variance", data.Variance().ToString(" #0.00000;-#0.00000"));
                    Console.WriteLine(@"{0} - sample standard deviation", data.StandardDeviation().ToString(" #0.00000;-#0.00000"));
                    Console.WriteLine(@"{0} - population standard deviation", data.PopulationStandardDeviation().ToString(" #0.00000;-#0.00000"));
                    Console.WriteLine($"Lower quartile (25%): {data.LowerQuartile()}");
                    Console.WriteLine($"Upper quartile (75%): {data.UpperQuartile()}");


                    return(1);

                case "file":
                    this.FilePath = uri.LocalPath;
                    if (Directory.Exists(this.FilePath) && !string.IsNullOrEmpty(this.TableName))
                    {
                        this.FilePath = Path.Combine(this.FilePath, this.TableName);
                    }

                    break;

                default:
                    return(0);
                }
            }

            if (!string.IsNullOrEmpty(this.FilePath))
            {
                IEnumerable <double> data;
                var ext   = Path.GetExtension(this.FilePath);
                var index = 0;


                if (ext.Equals(".shp", StringComparison.InvariantCultureIgnoreCase))
                {
                    if (!int.TryParse(this.Index, out index))
                    {
                        index = ShapeFileHelper.FindIndex(this.FilePath, this.Index);
                    }
                    var shpDataSource = new ShapeDataSource(this.FilePath, index);
                    data = shpDataSource.GetData();
                }
                else
                {
                    if (!int.TryParse(this.Index, out index))
                    {
                        Console.WriteLine("索引参数不对!");
                        return(1);
                    }
                    var dataSource = new CsvDataSource(this.FilePath, index);
                    data = dataSource.GetData();
                }
                var stats = new RunningStatistics(data);
                Console.WriteLine($"Count: {stats.Count}");
                Console.WriteLine($"Maximum: {stats.Maximum}");
                Console.WriteLine($"Minimum: {stats.Minimum}");
                Console.WriteLine($"Mean: {stats.Mean}");
                Console.WriteLine($"Standard deviation: {stats.StandardDeviation}");
                Console.WriteLine($"{stats}");
                Console.WriteLine($"Median: {data.Median()}");
                Console.WriteLine(@"{0} - population (biased) variance", data.PopulationVariance().ToString(" #0.00000;-#0.00000"));
                Console.WriteLine(@"{0} - sample (unbiased) variance", data.Variance().ToString(" #0.00000;-#0.00000"));
                Console.WriteLine(@"{0} - sample standard deviation", data.StandardDeviation().ToString(" #0.00000;-#0.00000"));
                Console.WriteLine(@"{0} - population standard deviation", data.PopulationStandardDeviation().ToString(" #0.00000;-#0.00000"));
                Console.WriteLine($"Lower quartile (25%): {data.LowerQuartile()}");
                Console.WriteLine($"Upper quartile (75%): {data.UpperQuartile()}");
                //new DescriptiveStatistics()

                var histogram = new Histogram(data, 100, stats.Minimum, stats.Maximum);
                //Console.WriteLine($"{histogram.ToString()}");
                for (var i = 0; i < 100; i++)
                {
                    var bucket = histogram[i];
                    Console.WriteLine($"({bucket.LowerBound}, {bucket.UpperBound}] {bucket.Count}");
                }

                return(1);
            }
            app.ShowHelp();
            return(1);
        }
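Both branches of OnExecute print the same block of Math.NET Numerics statistics. The self-contained sketch below reproduces those calls (RunningStatistics, the Statistics extension methods, and Histogram) over a small in-memory array, so the output can be checked without a PostgreSQL connection or an input file; the sample values are made up.

using System;
using MathNet.Numerics.Statistics;

public static class StatsSketch
{
    public static void Main()
    {
        double[] data = { 1.2, 3.4, 2.2, 5.1, 4.0, 2.9 }; // hypothetical sample

        var stats = new RunningStatistics(data);
        Console.WriteLine($"Count: {stats.Count}");
        Console.WriteLine($"Minimum/Maximum: {stats.Minimum}/{stats.Maximum}");
        Console.WriteLine($"Mean: {stats.Mean}");
        Console.WriteLine($"Standard deviation: {stats.StandardDeviation}"); // sample (n-1) form

        // The extension methods add order statistics and the population variants.
        Console.WriteLine($"Median: {data.Median()}");
        Console.WriteLine($"Population variance: {data.PopulationVariance()}"); // divides by n
        Console.WriteLine($"Sample variance: {data.Variance()}");               // divides by n-1
        Console.WriteLine($"Quartiles: {data.LowerQuartile()} / {data.UpperQuartile()}");

        // Histogram with half-open (lower, upper] buckets, printed as above.
        var histogram = new Histogram(data, 4, stats.Minimum, stats.Maximum);
        for (var i = 0; i < histogram.BucketCount; i++)
        {
            var bucket = histogram[i];
            Console.WriteLine($"({bucket.LowerBound}, {bucket.UpperBound}] {bucket.Count}");
        }
    }
}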
Ejemplo n.º 37
0
 public void RunningStatisticsConsistentWithDescriptiveStatistics(string dataSet)
 {
     var data = _data[dataSet];
     var running = new RunningStatistics(data.Data);
     var descriptive = new DescriptiveStatistics(data.Data);
     Assert.That(running.Minimum, Is.EqualTo(descriptive.Minimum), "Minimum");
     Assert.That(running.Maximum, Is.EqualTo(descriptive.Maximum), "Maximum");
     Assert.That(running.Mean, Is.EqualTo(descriptive.Mean).Within(1e-15), "Mean");
     Assert.That(running.Variance, Is.EqualTo(descriptive.Variance).Within(1e-15), "Variance");
     Assert.That(running.StandardDeviation, Is.EqualTo(descriptive.StandardDeviation).Within(1e-15), "StandardDeviation");
     Assert.That(running.Skewness, Is.EqualTo(descriptive.Skewness).Within(1e-15), "Skewness");
     Assert.That(running.Kurtosis, Is.EqualTo(descriptive.Kurtosis).Within(1e-14), "Kurtosis");
 }
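The test above checks the streaming accumulator against batch DescriptiveStatistics. RunningStatistics is also mergeable: values can be fed incrementally with Push/PushRange, and combining two accumulators should match a single pass over all the data, which is what makes the class usable for the per-batch loss tracking in the next example. A minimal sketch of that usage, assuming the Math.NET Numerics Push/PushRange/Combine API:

using System;
using MathNet.Numerics.Statistics;

public static class RunningSketch
{
    public static void Main()
    {
        double[] first  = { 1.0, 2.0, -3.0 };
        double[] second = { -4.0, 5.0 };

        // Incremental accumulation: one value at a time, or a whole range.
        var a = new RunningStatistics();
        foreach (var x in first) a.Push(x);
        var b = new RunningStatistics();
        b.PushRange(second);

        // Merging two accumulators matches accumulating everything in one pass.
        var merged = RunningStatistics.Combine(a, b);
        var all = new RunningStatistics();
        all.PushRange(first);
        all.PushRange(second);

        Console.WriteLine($"merged mean   = {merged.Mean}");
        Console.WriteLine($"one-pass mean = {all.Mean}");
        Console.WriteLine($"merged stddev   = {merged.StandardDeviation}");
        Console.WriteLine($"one-pass stddev = {all.StandardDeviation}");
    }
}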
Ejemplo n.º 38
0
        public static void Run()
        {
            RILogManager.Default?.SendDebug("MNIST Data Loading...");
            MnistData mnistData = new MnistData(28);

            RILogManager.Default?.SendDebug("Training Start...");

            int           neuronCount = 28;
            FunctionStack nn          = new FunctionStack("Test19",
                                                          new Linear(true, neuronCount * neuronCount, N, name: "l1 Linear"), // L1
                                                          new BatchNormalization(true, N, name: "l1 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l1 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l2 Linear"), // L2
                                                          new BatchNormalization(true, N, name: "l2 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l2 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l3 Linear"), // L3
                                                          new BatchNormalization(true, N, name: "l3 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l3 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l4 Linear"), // L4
                                                          new BatchNormalization(true, N, name: "l4 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l4 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l5 Linear"), // L5
                                                          new BatchNormalization(true, N, name: "l5 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l5 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l6 Linear"), // L6
                                                          new BatchNormalization(true, N, name: "l6 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l6 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l7 Linear"), // L7
                                                          new BatchNormalization(true, N, name: "l7 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l7 ReLU"),
                                                          new Linear(true, N, N, name: "l8 Linear"), // L8
                                                          new BatchNormalization(true, N, name: "l8 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l8 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l9 Linear"), // L9
                                                          new BatchNormalization(true, N, name: "l9 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l9 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l10 Linear"), // L10
                                                          new BatchNormalization(true, N, name: "l10 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l10 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l11 Linear"), // L11
                                                          new BatchNormalization(true, N, name: "l11 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l11 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l12 Linear"), // L12
                                                          new BatchNormalization(true, N, name: "l12 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l12 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l13 Linear"), // L13
                                                          new BatchNormalization(true, N, name: "l13 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l13 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l14 Linear"), // L14
                                                          new BatchNormalization(true, N, name: "l14 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l14 PolynomialApproximantSteep"),
                                                          new Linear(true, N, 10, name: "l15 Linear") // L15
                                                          );


            nn.SetOptimizer(new AdaGrad());
            //nn.SetOptimizer(new Adam());

            RunningStatistics stats             = new RunningStatistics();
            Histogram         lossHistogram     = new Histogram();
            Histogram         accuracyHistogram = new Histogram();
            // Set up the histogram buckets once, up front; adding them inside the
            // training loop would pile up duplicate buckets on every iteration.
            lossHistogram.AddBucket(new Bucket(-10, 10));
            accuracyHistogram.AddBucket(new Bucket(-10, 10));
            Real totalLoss        = 0;
            long totalLossCounter = 0;
            Real highestAccuracy  = 0;
            // Best-so-far losses start at MaxValue so the first finite loss is
            // accepted; a start value of 0 would never be beaten.
            Real bestLocalLoss    = double.MaxValue;
            Real bestTotalLoss    = double.MaxValue;

            // First skeleton save
            ModelIO.Save(nn, nn.Name);

            for (int epoch = 0; epoch < 1; epoch++)
            {
                RILogManager.Default?.SendDebug("epoch " + (epoch + 1));
                RILogManager.Default?.ViewerSendWatch("epoch", (epoch + 1));

                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    RILogManager.Default?.SendInformation("batch count " + i + "/" + TRAIN_DATA_COUNT);

                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, 28, 28);

                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;



                    stats.Push(sumLoss);

                    // NOTE: "sumLoss != Double.NaN" is always true, because NaN compares
                    // unequal to everything, itself included; double.IsNaN is required.
                    if (sumLoss < bestLocalLoss && !double.IsNaN(sumLoss))
                    {
                        bestLocalLoss = sumLoss;
                    }
                    if (stats.Mean < bestTotalLoss && !double.IsNaN(sumLoss))
                    {
                        bestTotalLoss = stats.Mean;
                    }

                    try
                    {
                        lossHistogram.AddData(sumLoss);
                    }
                    catch (Exception)
                    {
                        // Histogram.AddData throws when a value falls outside every
                        // bucket; such losses are deliberately ignored here.
                    }

                    if (i % 20 == 0)
                    {
                        RILogManager.Default?.SendDebug("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                        RILogManager.Default?.SendDebug("Total/Mean loss " + stats.Mean);
                        RILogManager.Default?.SendDebug("local loss " + sumLoss);

                        RILogManager.Default?.SendInformation("batch count " + i + "/" + TRAIN_DATA_COUNT);
                        RILogManager.Default?.ViewerSendWatch("batch count", i);
                        RILogManager.Default?.ViewerSendWatch("Total/Mean loss", stats.Mean);
                        RILogManager.Default?.ViewerSendWatch("local loss", sumLoss);


                        RILogManager.Default?.SendDebug("");

                        RILogManager.Default?.SendDebug("Testing...");
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);
                        Real        accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        if (accuracy > highestAccuracy)
                        {
                            highestAccuracy = accuracy;
                        }

                        RILogManager.Default?.SendDebug("Accuracy: " + accuracy);
                        RILogManager.Default?.ViewerSendWatch("Accuracy", accuracy);

                        try
                        {
                            accuracyHistogram.AddData(accuracy);
                        }
                        catch (Exception)
                        {
                            // Ignore accuracies outside the histogram's bucket range.
                        }
                    }
                }
            }

            RILogManager.Default?.SendDebug("Best Accuracy: " + highestAccuracy);
            RILogManager.Default?.SendDebug("Best Total Loss " + bestTotalLoss);
            RILogManager.Default?.SendDebug("Best Local Loss " + bestLocalLoss);

            RILogManager.Default?.ViewerSendWatch("Best Accuracy:", highestAccuracy);
            RILogManager.Default?.ViewerSendWatch("Best Total Loss", bestTotalLoss);
            RILogManager.Default?.ViewerSendWatch("Best Local Loss", bestLocalLoss);

            // Save all with training data
            ModelIO.Save(nn, nn.Name);
        }
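A detail worth isolating from the training loop above: best-so-far loss tracking has to start from double.MaxValue and test NaN with double.IsNaN, because x != Double.NaN evaluates to true for every x (NaN compares unequal to everything, itself included). A minimal standalone sketch of that guard, with made-up loss values:

using System;

public static class BestLossSketch
{
    public static void Main()
    {
        // Hypothetical per-batch losses, including a NaN from a diverged step.
        double[] losses = { 2.31, 1.87, double.NaN, 1.92, 1.45 };

        double bestLoss = double.MaxValue; // starting at 0 would never update
        foreach (var loss in losses)
        {
            // double.IsNaN is the only reliable NaN test; "loss != double.NaN"
            // would be true even when loss actually is NaN.
            if (!double.IsNaN(loss) && loss < bestLoss)
            {
                bestLoss = loss;
            }
        }

        Console.WriteLine($"best loss = {bestLoss}"); // prints 1.45
    }
}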