/// <summary>
/// Round-trips two appended histograms through an in-memory log and
/// verifies both come back value-equal and in append order.
/// </summary>
public void CanAppendHistogram()
{
    // Arrange: two histograms with recorded values and timestamps set.
    var first = Create(DefaultHighestTrackableValue, DefaultSignificantDigits);
    first.RecordValue(1);
    first.RecordValue((long.MaxValue / 2) + 1);
    first.SetTimes();

    var second = Create(DefaultHighestTrackableValue, DefaultSignificantDigits);
    second.RecordValue(2);
    second.SetTimes();

    // Act: append both histograms to a purely in-memory log, then parse it back.
    byte[] logBytes;
    using (var stream = new MemoryStream())
    using (var writer = new HistogramLogWriter(stream))
    {
        writer.Append(first);
        writer.Append(second);
        logBytes = stream.ToArray();
    }
    var roundTripped = logBytes.ReadHistograms();

    // Assert: both histograms survived the round trip, in order.
    Assert.Equal(2, roundTripped.Length);
    HistogramAssert.AreValueEqual(first, roundTripped.First());
    HistogramAssert.AreValueEqual(second, roundTripped.Skip(1).First());
}
        /// <summary>
        /// Serializes a single histogram into the log format and returns the raw bytes.
        /// The log header's start time is derived from the histogram's own StartTimeStamp.
        /// </summary>
        public static byte[] WriteLog(this HistogramBase histogram)
        {
            var headerStartTime = histogram.StartTimeStamp.ToDateFromMillisecondsSinceEpoch();

            using (var buffer = new MemoryStream())
            {
                HistogramLogWriter.Write(buffer, headerStartTime, histogram);
                return buffer.ToArray();
            }
        }
        /// <summary>
        /// Starts the example: writes the log header, launches a background thread
        /// that flushes histograms to disk, then records measurements on this thread.
        /// </summary>
        public static void Run()
        {
            Console.WriteLine($"Running for {RunPeriod.TotalSeconds}sec.");

            _outputStream = File.Create(LogPath);
            _logWriter = new HistogramLogWriter(_outputStream);
            // Write the headers, but no histograms (as we don't have any yet).
            _logWriter.Write(DateTime.Now);

            var diskWriterThread = new Thread(_ => WriteToDisk());
            diskWriterThread.Start();
            RecordMeasurements();
        }
        /// <summary>
        /// Writes a log containing only a header (no histograms) and verifies the
        /// reader yields no histograms while still reporting the written start time.
        /// </summary>
        public void CanReadEmptyLog()
        {
            // Start times are compared at millisecond precision (rounded to 3 decimals).
            var writtenStartTime = DateTime.Now;
            var expectedStartTime = writtenStartTime.SecondsSinceUnixEpoch().Round(3);

            byte[] logBytes;
            using (var writerStream = new MemoryStream())
            {
                HistogramLogWriter.Write(writerStream, writtenStartTime);
                logBytes = writerStream.ToArray();
            }

            using (var readerStream = new MemoryStream(logBytes))
            {
                var reader = new HistogramLogReader(readerStream);

                Assert.Empty(reader.ReadHistograms().ToList());

                var actualStartTime = reader.GetStartTime().SecondsSinceUnixEpoch().Round(3);
                Assert.Equal(expectedStartTime, actualStartTime);
            }
        }
Example #5
0
        /// <summary>
        /// End-to-end demo: downloads the corpus files, trains a two-layer LSTM language
        /// model with truncated BPTT, and records latency histograms throughout, writing
        /// them to the log file opened here and printing summaries at the end.
        /// NOTE(review): depends on many members declared elsewhere in this type
        /// (LogPath, DOWNLOAD_URL, N_UNITS, BATCH_SIZE, N_EPOCH, BPROP_LEN, GRAD_CLIP,
        /// DocumentResults, Evaluate, BytesToString) — verify their contracts there.
        /// </summary>
        public static void Run()
        {
            // Open the histogram log file and write the header (no histograms yet).
            _outputStream = File.Create(LogPath);

            _logWriter = new HistogramLogWriter(_outputStream);
            _logWriter.Write(DateTime.Now);

            // Thread-safe recorder for values in [1, 2345678912345] with 3 significant digits.
            var recorder = HistogramFactory
                           .With64BitBucketSize()
                           ?.WithValuesFrom(1)
                           ?.WithValuesUpTo(2345678912345)
                           ?.WithPrecisionOf(3)
                           ?.WithThreadSafeWrites()
                           ?.WithThreadSafeReads()
                           ?.Create();

            // Accumulates all samples for the whole run (same range/precision as the recorder).
            var accumulatingHistogram = new LongHistogram(2345678912345, 3);

            var size = accumulatingHistogram.GetEstimatedFootprintInBytes();

            RILogManager.Default?.SendDebug("Histogram size = {0} bytes ({1:F2} MB)", size, size / 1024.0 / 1024.0);


            // Print the (currently empty) percentile distribution at several unit scales.
            RILogManager.Default?.SendDebug("Recorded latencies [in system clock ticks]");
            accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.None, useCsvFormat: true);
            Console.WriteLine();

            RILogManager.Default?.SendDebug("Recorded latencies [in usec]");
            accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMicroseconds, useCsvFormat: true);
            Console.WriteLine();

            RILogManager.Default?.SendDebug("Recorded latencies [in msec]");
            accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMilliseconds, useCsvFormat: true);
            Console.WriteLine();

            RILogManager.Default?.SendDebug("Recorded latencies [in sec]");
            accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToSeconds, useCsvFormat: true);

            // DocumentResults is called after each stage below — presumably it snapshots the
            // recorder into the accumulating histogram and logs it; confirm in its definition.
            DocumentResults(accumulatingHistogram, recorder);

            RILogManager.Default?.SendDebug("Build Vocabulary.");

            DocumentResults(accumulatingHistogram, recorder);

            Vocabulary vocabulary = new Vocabulary();

            DocumentResults(accumulatingHistogram, recorder);

            // Download the train/validation/test corpus files.
            string trainPath = InternetFileDownloader.Download(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE);

            DocumentResults(accumulatingHistogram, recorder);

            string validPath = InternetFileDownloader.Download(DOWNLOAD_URL + VALID_FILE, VALID_FILE);

            DocumentResults(accumulatingHistogram, recorder);

            string testPath = InternetFileDownloader.Download(DOWNLOAD_URL + TEST_FILE, TEST_FILE);

            DocumentResults(accumulatingHistogram, recorder);


            // Tokenize each corpus file into integer word ids via the shared vocabulary.
            int[] trainData = vocabulary.LoadData(trainPath);
            DocumentResults(accumulatingHistogram, recorder);

            int[] validData = vocabulary.LoadData(validPath);
            DocumentResults(accumulatingHistogram, recorder);

            int[] testData = vocabulary.LoadData(testPath);
            DocumentResults(accumulatingHistogram, recorder);

            int nVocab = vocabulary.Length;

            // Model: EmbedID -> LSTM -> LSTM -> Linear, with dropout between layers.
            RILogManager.Default?.SendDebug("Network Initializing.");
            FunctionStack model = new FunctionStack("Test10",
                                                    new EmbedID(nVocab, N_UNITS, name: "l1 EmbedID"),
                                                    new Dropout(),
                                                    new LSTM(true, N_UNITS, N_UNITS, name: "l2 LSTM"),
                                                    new Dropout(),
                                                    new LSTM(true, N_UNITS, N_UNITS, name: "l3 LSTM"),
                                                    new Dropout(),
                                                    new Linear(true, N_UNITS, nVocab, name: "l4 Linear")
                                                    );

            DocumentResults(accumulatingHistogram, recorder);

            // Do not cease at the given threshold, correct the rate by taking the rate from L2Norm of all parameters
            GradientClipping gradientClipping = new GradientClipping(threshold: GRAD_CLIP);
            SGD sgd = new SGD(learningRate: 1);

            model.SetOptimizer(gradientClipping, sgd);
            DocumentResults(accumulatingHistogram, recorder);

            // jump = steps per epoch; each batch lane j reads the corpus at offset jump * j,
            // so the BATCH_SIZE lanes sweep disjoint regions of the training data.
            Real wholeLen = trainData.Length;
            int  jump     = (int)Math.Floor(wholeLen / BATCH_SIZE);
            int  epoch    = 0;

            // Forward results are stacked so truncated BPTT can replay them newest-first.
            Stack <NdArray[]> backNdArrays = new Stack <NdArray[]>();

            RILogManager.Default?.SendDebug("Train Start.");
            double  dVal;
            NdArray x = new NdArray(new[] { 1 }, BATCH_SIZE, (Function)null);
            NdArray t = new NdArray(new[] { 1 }, BATCH_SIZE, (Function)null);

            for (int i = 0; i < jump * N_EPOCH; i++)
            {
                // Fill input batch x and targets t; t is x shifted forward by one token.
                for (int j = 0; j < BATCH_SIZE; j++)
                {
                    x.Data[j] = trainData[(int)((jump * j + i) % wholeLen)];
                    t.Data[j] = trainData[(int)((jump * j + i + 1) % wholeLen)];
                }

                NdArray[] result  = model.Forward(true, x);
                Real      sumLoss = new SoftmaxCrossEntropy().Evaluate(result, t);
                backNdArrays.Push(result);
                RILogManager.Default?.SendDebug("[{0}/{1}] Loss: {2}", i + 1, jump, sumLoss);

                //Run truncated BPTT
                if ((i + 1) % BPROP_LEN == 0)
                {
                    // Backpropagate through the buffered steps, newest first, then update.
                    for (int j = 0; backNdArrays.Count > 0; j++)
                    {
                        RILogManager.Default?.SendDebug("backward" + backNdArrays.Count);
                        model.Backward(true, backNdArrays.Pop());
                    }

                    model.Update();
                    model.ResetState();
                }

                // End of an epoch: evaluate on the validation set; from epoch 6 onward,
                // decay the learning rate by a factor of 1.2 each epoch.
                if ((i + 1) % jump == 0)
                {
                    epoch++;
                    RILogManager.Default?.SendDebug("evaluate");
                    dVal = Evaluate(model, validData);
                    RILogManager.Default?.SendDebug($"validation perplexity: {dVal}");

                    if (epoch >= 6)
                    {
                        sgd.LearningRate /= 1.2;
                        RILogManager.Default?.SendDebug("learning rate =" + sgd.LearningRate);
                    }
                }
                DocumentResults(accumulatingHistogram, recorder);
            }

            // Final evaluation on the held-out test set.
            RILogManager.Default?.SendDebug("test start");
            dVal = Evaluate(model, testData);
            RILogManager.Default?.SendDebug("test perplexity:" + dVal);
            DocumentResults(accumulatingHistogram, recorder);

            // Close the histogram log before reading it back for display.
            _logWriter.Dispose();
            _outputStream.Dispose();


            RILogManager.Default?.SendDebug("Log contents");
            RILogManager.Default?.SendDebug(File.ReadAllText(LogPath));
            Console.WriteLine();
            RILogManager.Default?.SendDebug("Percentile distribution (values reported in milliseconds)");
            accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMilliseconds, useCsvFormat: true);

            RILogManager.Default?.SendDebug("Mean: " + BytesToString(accumulatingHistogram.GetMean()) + ", StdDev: " +
                                            BytesToString(accumulatingHistogram.GetStdDeviation()));
        }
        /// <summary>
        /// Round-trips two appended histograms through an in-memory log and verifies
        /// both are read back value-equal, in append order.
        /// </summary>
        public void CanAppendHistogram()
        {
            // Arrange: two populated histograms with their start/end times set.
            var first = Create(DefaultHighestTrackableValue, DefaultSignificantDigits);
            first.RecordValue(1);
            first.RecordValue((long.MaxValue / 2) + 1);
            first.SetTimes();
            var second = Create(DefaultHighestTrackableValue, DefaultSignificantDigits);
            second.RecordValue(2);
            second.SetTimes();

            // Act: append both to a log held entirely in memory, then parse it back.
            byte[] logBytes;
            using (var stream = new MemoryStream())
            using (var writer = new HistogramLogWriter(stream))
            {
                writer.Append(first);
                writer.Append(second);
                logBytes = stream.ToArray();
            }
            var readBack = logBytes.ReadHistograms();

            // Assert: order and values are preserved.
            Assert.AreEqual(2, readBack.Length);
            HistogramAssert.AreValueEqual(first, readBack[0]);
            HistogramAssert.AreValueEqual(second, readBack[1]);
        }
 /// <summary>
 /// Opens the output log file and attaches the histogram log writer to it;
 /// both are kept in fields so they can be disposed when the example ends.
 /// </summary>
 public RecorderExample()
 {
     _logWriter = new HistogramLogWriter(_outputStream = File.Create(LogPath));
 }
Example #8
0
 /// <summary>
 /// Creates (or truncates) the log file at LogPath and wraps it in a
 /// histogram log writer; both are stored in fields for later disposal.
 /// </summary>
 public RecorderExample()
 {
     _outputStream = File.Create(LogPath);
     _logWriter    = new HistogramLogWriter(_outputStream);
 }