// Runs one measurement session: writes the log header, builds a thread-safe
// recorder, starts a background thread that drains the recorder to disk,
// records measurements on the calling thread, then waits for the writer.
// May only be invoked once per instance.
public void Run()
{
    // Guard: a second Run() would reuse the log writer and recorder state.
    if (HasRunBeenCalled())
    {
        throw new InvalidOperationException("Can only call run once.");
    }

    Console.WriteLine($"Running for {RunPeriod.TotalSeconds}sec.");

    // Write the headers, but no histograms (as we don't have any yet).
    // NOTE(review): DateTime.Now (local time) is used as the log start time —
    // confirm local time rather than UtcNow is intended here.
    _logWriter.Write(DateTime.Now);

    // ThreadSafe-writes require a Concurrent implementation of a Histogram;
    // ThreadSafe-reads require a recorder wrapping it.
    var recorder = HistogramFactory
        .With64BitBucketSize()                  // LongHistogram
        .WithValuesFrom(1)                      // Default value
        .WithValuesUpTo(TimeStamp.Minutes(10))  // Default value
        .WithPrecisionOf(3)                     // Default value
        .WithThreadSafeWrites()                 // Switches internal imp to concurrent version i.e. LongConcurrentHistogram
        .WithThreadSafeReads()                  // Returns a Recorder that wraps the LongConcurrentHistogram
        .Create();

    // Drain to disk on a separate thread so recording is not blocked by I/O.
    var outputThread = new Thread(ts => WriteToDisk((Recorder)ts));
    outputThread.Start(recorder);

    // Record on the current thread while the writer runs concurrently.
    RecordMeasurements(recorder);

    // Wait for the output thread to complete writing.
    outputThread.Join();
}
/// <summary>
/// The 64-bit factory with no further configuration should produce a
/// <see cref="LongHistogram"/>.
/// </summary>
public void CanCreateLongHistogram()
{
    var histogram = HistogramFactory.With64BitBucketSize().Create();

    Assert.IsAssignableFrom<LongHistogram>(histogram);
}
/// <summary>
/// Creating from the 64-bit factory without extra options yields a
/// <see cref="LongHistogram"/> instance.
/// </summary>
public void CanCreateLongHistogram()
{
    var created = HistogramFactory
        .With64BitBucketSize()
        .Create();

    Assert.IsInstanceOf<LongHistogram>(created);
}
/// <summary>
/// Builds the histogram under test via the fluent factory, using the
/// factory's default lowest trackable value.
/// </summary>
/// <param name="highestTrackableValue">Largest value the histogram must track.</param>
/// <param name="numberOfSignificantValueDigits">Precision in significant decimal digits.</param>
/// <returns>A 64-bit-bucket histogram configured with the supplied range and precision.</returns>
protected override HistogramBase Create(long highestTrackableValue, int numberOfSignificantValueDigits)
{
    // Removed commented-out direct LongHistogram construction; the fluent
    // factory is the canonical creation path in this suite.
    return HistogramFactory.With64BitBucketSize()
        .WithValuesUpTo(highestTrackableValue)
        .WithPrecisionOf(numberOfSignificantValueDigits)
        .Create();
}
/// <summary>
/// Requesting thread-safe writes should switch the created type to
/// <see cref="LongConcurrentHistogram"/>.
/// </summary>
public void CanCreateLongConcurrentHistogram()
{
    var builder = HistogramFactory
        .With64BitBucketSize()
        .WithThreadSafeWrites();

    var histogram = builder.Create();

    Assert.IsAssignableFrom<LongConcurrentHistogram>(histogram);
}
/// <summary>
/// Creates the histogram under test from the fluent factory.
/// </summary>
/// <param name="id">Not used by this creation path; present to satisfy the base signature.</param>
/// <param name="min">Lowest trackable value.</param>
/// <param name="max">Highest trackable value.</param>
/// <param name="sf">Number of significant value digits.</param>
/// <returns>A 64-bit-bucket histogram configured with the supplied range and precision.</returns>
protected override HistogramBase CreateHistogram(long id, long min, long max, int sf)
{
    // Removed commented-out direct LongHistogram construction in favour of
    // the fluent factory used throughout the suite.
    return HistogramFactory.With64BitBucketSize()
        .WithValuesFrom(min)
        .WithValuesUpTo(max)
        .WithPrecisionOf(sf)
        .Create();
}
/// <summary>
/// Builds a <see cref="Recorder"/> (thread-safe reads) over a histogram
/// configured with the given range and precision.
/// </summary>
protected override Recorder Create(long min, long max, int sf)
{
    var readerBuilder = HistogramFactory.With64BitBucketSize()
        .WithValuesFrom(min)
        .WithValuesUpTo(max)
        .WithPrecisionOf(sf)
        .WithThreadSafeReads();

    return readerBuilder.Create();
}
/// <summary>
/// Creates a thread-safe-write (concurrent) histogram for the supplied range
/// and precision via the fluent factory.
/// </summary>
protected override HistogramBase CreateHistogram(long id, long min, long max, int sf)
{
    var concurrentBuilder = HistogramFactory
        .With64BitBucketSize()
        .WithValuesFrom(min)
        .WithValuesUpTo(max)
        .WithPrecisionOf(sf)
        .WithThreadSafeWrites();

    return concurrentBuilder.Create();
}
/// <summary>
/// Creates the concurrent histogram under test, relying on the factory's
/// default lowest trackable value (the removed direct construction used 1).
/// </summary>
/// <param name="highestTrackableValue">Largest value the histogram must track.</param>
/// <param name="numberOfSignificantValueDigits">Precision in significant decimal digits.</param>
/// <returns>A thread-safe-write histogram with the supplied range and precision.</returns>
internal override HistogramBase Create(long highestTrackableValue, int numberOfSignificantValueDigits)
{
    // Removed commented-out `new LongConcurrentHistogram(1, ...)` call; the
    // fluent factory is the canonical creation path.
    return HistogramFactory.With64BitBucketSize()
        .WithValuesUpTo(highestTrackableValue)
        .WithPrecisionOf(numberOfSignificantValueDigits)
        .WithThreadSafeWrites()
        .Create();
}
/// <summary>
/// Static initializer: builds the shared empty snapshot once, from the first
/// interval of a freshly created recorder (which has had nothing recorded).
/// </summary>
static HdrHistogramReservoir()
{
    var emptyInterval = HistogramFactory
        .With64BitBucketSize()
        .WithThreadSafeReads()
        .Create()
        .GetIntervalHistogram();

    _emptySnapshot = new HdrSnapshot(emptyInterval, 0, null, 0, null);
}
/// <summary>
/// A histogram created with explicit range and precision settings reports
/// those same settings back through its properties.
/// </summary>
public void CanCreateLongHistogramWithSpecifiedRangeValues(long min, long max, int sf)
{
    var created = HistogramFactory
        .With64BitBucketSize()
        .WithValuesFrom(min)
        .WithValuesUpTo(max)
        .WithPrecisionOf(sf)
        .Create();

    Assert.IsAssignableFrom<LongHistogram>(created);
    Assert.Equal(min, created.LowestTrackableValue);
    Assert.Equal(max, created.HighestTrackableValue);
    Assert.Equal(sf, created.NumberOfSignificantValueDigits);
}
/// <summary>
/// A concurrent (thread-safe-write) histogram created with explicit settings
/// reports those same settings back through its properties.
/// </summary>
public void LongConcurrentHistogramWithSpecifiedRangeValues(long min, long max, int sf)
{
    var concurrentBuilder = HistogramFactory.With64BitBucketSize()
        .WithValuesFrom(min)
        .WithValuesUpTo(max)
        .WithPrecisionOf(sf)
        .WithThreadSafeWrites();

    var created = concurrentBuilder.Create();

    Assert.IsInstanceOf<LongConcurrentHistogram>(created);
    Assert.AreEqual(min, created.LowestTrackableValue);
    Assert.AreEqual(max, created.HighestTrackableValue);
    Assert.AreEqual(sf, created.NumberOfSignificantValueDigits);
}
/// <summary>
/// A recorder built with explicit settings hands out interval histograms that
/// carry those same range and precision settings.
/// </summary>
public void CanCreateLongHistogramRecorder(long min, long max, int sf)
{
    var recorder = HistogramFactory
        .With64BitBucketSize()
        .WithValuesFrom(min)
        .WithValuesUpTo(max)
        .WithPrecisionOf(sf)
        .WithThreadSafeReads()
        .Create();

    var interval = recorder.GetIntervalHistogram();

    Assert.IsAssignableFrom<LongHistogram>(interval);
    Assert.Equal(min, interval.LowestTrackableValue);
    Assert.Equal(max, interval.HighestTrackableValue);
    Assert.Equal(sf, interval.NumberOfSignificantValueDigits);
}
/// <summary>
/// Initializes a new instance of the <see cref="HdrHistogramReservoir" /> class.
/// </summary>
/// <param name="lowestTrackableValue">Smallest value the reservoir must track.</param>
/// <param name="highestTrackableValue">Largest value the reservoir must track.</param>
/// <param name="numberOfSignificantValueDigits">Precision in significant decimal digits.</param>
public HdrHistogramReservoir(long lowestTrackableValue, long highestTrackableValue, int numberOfSignificantValueDigits)
{
    _highestTrackableValue = highestTrackableValue;

    // Thread-safe writes + thread-safe reads => a Recorder wrapping a
    // concurrent histogram, so recording and sampling can run on different threads.
    var recorder = HistogramFactory
        .With64BitBucketSize()
        .WithValuesFrom(lowestTrackableValue)
        .WithValuesUpTo(highestTrackableValue)
        .WithPrecisionOf(numberOfSignificantValueDigits)
        .WithThreadSafeWrites()
        .WithThreadSafeReads()
        .Create();

    _recorder = recorder;

    // Prime _intervalHistogram with the recorder's first interval.
    _intervalHistogram = recorder.GetIntervalHistogram();

    // Accumulates values across intervals; precision is taken from the interval
    // histogram so the two stay consistent.
    _runningTotals = new LongHistogram(lowestTrackableValue, highestTrackableValue, _intervalHistogram.NumberOfSignificantValueDigits);
}
/// <summary>
/// Initializes a new instance of the <see cref="HdrHistogramReservoir" /> class.
/// </summary>
/// <param name="lowestTrackableValue">Smallest value the reservoir must track.</param>
/// <param name="highestTrackableValue">Largest value the reservoir must track.</param>
/// <param name="numberOfSignificantValueDigits">Precision in significant decimal digits.</param>
/// <param name="refreshIntervalMilliseconds">How often (in ms) the reservoir is refreshed.</param>
public HdrHistogramReservoir(
    long lowestTrackableValue,
    long highestTrackableValue,
    int numberOfSignificantValueDigits,
    long refreshIntervalMilliseconds)
{
    _highestTrackableValue = highestTrackableValue;

    // Store the refresh interval in ticks for cheap comparison against DateTime ticks.
    _refreshIntervalTicks = TimeSpan.FromMilliseconds(refreshIntervalMilliseconds).Ticks;

    // Thread-safe writes + thread-safe reads => a Recorder wrapping a
    // concurrent histogram, so recording and sampling can run on different threads.
    var recorder = HistogramFactory
        .With64BitBucketSize()
        .WithValuesFrom(lowestTrackableValue)
        .WithValuesUpTo(highestTrackableValue)
        .WithPrecisionOf(numberOfSignificantValueDigits)
        .WithThreadSafeWrites()
        .WithThreadSafeReads()
        .Create();

    _recorder = recorder;

    // Prime _intervalHistogram with the recorder's first interval.
    _intervalHistogram = recorder.GetIntervalHistogram();

    // Force an immediate refresh so the reservoir starts from a known state.
    UnsafeRefresh(DateTime.UtcNow.Ticks, true);
}
// Demo entry point that interleaves HdrHistogram latency logging with an LSTM
// RNNLM training run (Penn Treebank-style data): it opens a histogram log,
// prints several percentile-distribution views, downloads/loads the corpora,
// trains with truncated BPTT, evaluates, and finally dumps the log contents.
// NOTE(review): the null-conditional (`?.`) calls on the fluent builder mean
// `recorder` may end up null if any step returns null — confirm DocumentResults
// and the commented callers tolerate a null recorder.
public static void Run()
{
    // Open the histogram log and write its header (no histograms yet).
    _outputStream = File.Create(LogPath);
    _logWriter = new HistogramLogWriter(_outputStream);
    _logWriter.Write(DateTime.Now);

    // Recorder over a concurrent histogram: safe writes + safe reads.
    var recorder = HistogramFactory
        .With64BitBucketSize()
        ?.WithValuesFrom(1)
        ?.WithValuesUpTo(2345678912345)
        ?.WithPrecisionOf(3)
        ?.WithThreadSafeWrites()
        ?.WithThreadSafeReads()
        ?.Create();

    // Separate accumulating histogram with the same range/precision as above.
    var accumulatingHistogram = new LongHistogram(2345678912345, 3);

    var size = accumulatingHistogram.GetEstimatedFootprintInBytes();
    RILogManager.Default?.SendDebug("Histogram size = {0} bytes ({1:F2} MB)", size, size / 1024.0 / 1024.0);

    // Dump the (currently empty) percentile distribution at four time scales.
    RILogManager.Default?.SendDebug("Recorded latencies [in system clock ticks]");
    accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.None, useCsvFormat: true);
    Console.WriteLine();
    RILogManager.Default?.SendDebug("Recorded latencies [in usec]");
    accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMicroseconds, useCsvFormat: true);
    Console.WriteLine();
    RILogManager.Default?.SendDebug("Recorded latencies [in msec]");
    accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMilliseconds, useCsvFormat: true);
    Console.WriteLine();
    RILogManager.Default?.SendDebug("Recorded latencies [in sec]");
    accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToSeconds, useCsvFormat: true);

    // DocumentResults is called after each significant step to capture its cost
    // in the histogram log.
    DocumentResults(accumulatingHistogram, recorder);

    RILogManager.Default?.SendDebug("Build Vocabulary.");
    DocumentResults(accumulatingHistogram, recorder);
    Vocabulary vocabulary = new Vocabulary();
    DocumentResults(accumulatingHistogram, recorder);

    // Fetch the train/validation/test corpora (cached by the downloader, presumably).
    string trainPath = InternetFileDownloader.Download(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE);
    DocumentResults(accumulatingHistogram, recorder);
    string validPath = InternetFileDownloader.Download(DOWNLOAD_URL + VALID_FILE, VALID_FILE);
    DocumentResults(accumulatingHistogram, recorder);
    string testPath = InternetFileDownloader.Download(DOWNLOAD_URL + TEST_FILE, TEST_FILE);
    DocumentResults(accumulatingHistogram, recorder);

    // Tokenize each corpus into id sequences via the shared vocabulary.
    int[] trainData = vocabulary.LoadData(trainPath);
    DocumentResults(accumulatingHistogram, recorder);
    int[] validData = vocabulary.LoadData(validPath);
    DocumentResults(accumulatingHistogram, recorder);
    int[] testData = vocabulary.LoadData(testPath);
    DocumentResults(accumulatingHistogram, recorder);

    int nVocab = vocabulary.Length;

    RILogManager.Default?.SendDebug("Network Initializing.");

    // Embedding -> 2 x (LSTM + Dropout) -> Linear projection back to vocab size.
    FunctionStack model = new FunctionStack("Test10",
        new EmbedID(nVocab, N_UNITS, name: "l1 EmbedID"),
        new Dropout(),
        new LSTM(true, N_UNITS, N_UNITS, name: "l2 LSTM"),
        new Dropout(),
        new LSTM(true, N_UNITS, N_UNITS, name: "l3 LSTM"),
        new Dropout(),
        new Linear(true, N_UNITS, nVocab, name: "l4 Linear")
    );
    DocumentResults(accumulatingHistogram, recorder);

    // Do not cease at the given threshold, correct the rate by taking the rate
    // from L2Norm of all parameters.
    GradientClipping gradientClipping = new GradientClipping(threshold: GRAD_CLIP);
    SGD sgd = new SGD(learningRate: 1);
    model.SetOptimizer(gradientClipping, sgd);
    DocumentResults(accumulatingHistogram, recorder);

    // Each batch lane j reads the corpus at a fixed offset of `jump`, so the
    // BATCH_SIZE lanes stride through disjoint regions of the training data.
    Real wholeLen = trainData.Length;
    int jump = (int)Math.Floor(wholeLen / BATCH_SIZE);
    int epoch = 0;

    // Forward results are stacked until the next truncated-BPTT flush.
    Stack<NdArray[]> backNdArrays = new Stack<NdArray[]>();

    RILogManager.Default?.SendDebug("Train Start.");
    double dVal;
    NdArray x = new NdArray(new[] { 1 }, BATCH_SIZE, (Function)null);
    NdArray t = new NdArray(new[] { 1 }, BATCH_SIZE, (Function)null);

    for (int i = 0; i < jump * N_EPOCH; i++)
    {
        // Input is token at position p, target is the next token (p + 1).
        for (int j = 0; j < BATCH_SIZE; j++)
        {
            x.Data[j] = trainData[(int)((jump * j + i) % wholeLen)];
            t.Data[j] = trainData[(int)((jump * j + i + 1) % wholeLen)];
        }

        NdArray[] result = model.Forward(true, x);
        Real sumLoss = new SoftmaxCrossEntropy().Evaluate(result, t);
        backNdArrays.Push(result);
        RILogManager.Default?.SendDebug("[{0}/{1}] Loss: {2}", i + 1, jump, sumLoss);

        // Run truncated BPTT: backprop through the stacked steps (newest first),
        // then update and reset the recurrent state.
        if ((i + 1) % BPROP_LEN == 0)
        {
            for (int j = 0; backNdArrays.Count > 0; j++)
            {
                RILogManager.Default?.SendDebug("backward" + backNdArrays.Count);
                model.Backward(true, backNdArrays.Pop());
            }
            model.Update();
            model.ResetState();
        }

        // End of an epoch: evaluate on validation data and, from epoch 6 on,
        // decay the learning rate.
        if ((i + 1) % jump == 0)
        {
            epoch++;
            RILogManager.Default?.SendDebug("evaluate");
            dVal = Evaluate(model, validData);
            RILogManager.Default?.SendDebug($"validation perplexity: {dVal}");

            if (epoch >= 6)
            {
                sgd.LearningRate /= 1.2;
                RILogManager.Default?.SendDebug("learning rate =" + sgd.LearningRate);
            }
        }
        DocumentResults(accumulatingHistogram, recorder);
    }

    // Final evaluation on the held-out test set.
    RILogManager.Default?.SendDebug("test start");
    dVal = Evaluate(model, testData);
    RILogManager.Default?.SendDebug("test perplexity:" + dVal);
    DocumentResults(accumulatingHistogram, recorder);

    // Close the log before reading it back for display.
    _logWriter.Dispose();
    _outputStream.Dispose();

    RILogManager.Default?.SendDebug("Log contents");
    RILogManager.Default?.SendDebug(File.ReadAllText(LogPath));
    Console.WriteLine();

    RILogManager.Default?.SendDebug("Percentile distribution (values reported in milliseconds)");
    accumulatingHistogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMilliseconds, useCsvFormat: true);

    RILogManager.Default?.SendDebug("Mean: " + BytesToString(accumulatingHistogram.GetMean()) + ", StdDev: " + BytesToString(accumulatingHistogram.GetStdDeviation()));
}
// Test driver: individual demos are enabled/disabled by (un)commenting the
// calls below. Currently only Test21 runs.
// NOTE(review): `recorder` is built here but every `recorder.Record(...)` call
// is commented out, so it is effectively unused in the current configuration.
// The `?.` calls on the fluent builder also mean it may be null — confirm
// before re-enabling the Record(...) lines.
static void Main(string[] args)
{
    //Comment out here if you want to run all on .Net Framework
    //Weaver.Initialize(ComputeDeviceTypes.Gpu);
    //Weaver.Initialize(ComputeDeviceTypes.Cpu, 0); //Subscript required if there are multiple devices

    // Recorder over a concurrent histogram, used to time the demos below.
    var recorder = HistogramFactory
        .With64BitBucketSize()
        ?.WithValuesFrom(1)
        ?.WithValuesUpTo(2345678912345)
        ?.WithPrecisionOf(3)
        ?.WithThreadSafeWrites()
        ?.WithThreadSafeReads()
        ?.Create();

    //Learning XOR with MLP
    //recorder.Record(() => Test1.Run());

    //Learning XOR with MLP 【Returned version】
    //Test2.Run();

    //Learning of Sin function by MLP
    //recorder.Record(() => Test3.Run());

    //Learning of MNIST (Handwritten Characters) by MLP
    //Test4.Run();

    //Reproduction of Excel CNN
    //recorder.Record(() => Test5.Run());

    //Learning of MNIST by 5-layer CNN
    //recorder.Record(() => Test6.Run());

    //Learning of MNIST by 15-tier MLP using BatchNorm
    //Test7.Run();

    //Learning of Sin function by LSTM
    //Test8.Run();

    //RNNLM with Simple RNN
    //Test9.Run();

    //RNNLM by LSTM
    //Test10.Run();

    //Decoupled Neural Interfaces using Synthetic Gradients by learning MNIST
    //Test11.Run();

    //DNI of Test 11 was defined as cDNI
    //Test12.Run();

    //Test of Deconvolution 2D(Winform)
    //new Test13WinForm().ShowDialog();

    //Concatenate Test 6 and execute
    //Test14.Run();

    //Test to read VGA 16 of Caffe model and classify images
    //Test15.Run();

    //Load and execute the same content as Test 5 of Chainer model
    //Test16.Run();

    //Test that reads RESNET of Caffe model and classifies images
    //Test17.Run(Test17.ResnetModel.ResNet152); //Please select any Resnet model

    //Learn CIFAR-10 with 5-layer CNN
    //Test18.Run();

    //Partial execution of Linear
    //TestX.Run();

    // LeakyReLu and PolynomialApproximantSteep combination network
    //Test19.Run();

    // 1000 layer neural network
    //Test20.Run();

    // MNIST (Handwritten Characters) by MLP Accuracy Tester 99.79 is goal
    Test21.Run();

    //benchmark
    //SingleBenchmark.Run(true);

    RILogManager.Default?.SendDebug("Test Done...");
    Console.WriteLine("Test Complete, press any key to end");
    Console.Read();
}