public async Task StartAsync(IDialogContext context)
{
    // create and initialize the network before accepting messages
    network = new ClassificationNetwork();
    network.GetInitilizationData();
    network.InitializeNeuralNetwork();

    // register the handler for the next user message
    context.Wait(this.MessageReceivedAsync);
}
public void SaveToMemoryTest()
{
    ClassificationNetwork net2 = ClassificationNetwork.FromMemory(this.net.SaveToMemory(NetworkFileFormat.JSON));

    string s1 = JsonConvert.SerializeObject(this.net);
    string s2 = JsonConvert.SerializeObject(net2);
    Assert.AreEqual(s1, s2);
}
public void SaveToStringTest()
{
    string s = this.net.SaveToString(NetworkFileFormat.JSON);
    ClassificationNetwork net2 = ClassificationNetwork.FromString(s);

    string s1 = JsonConvert.SerializeObject(this.net);
    string s2 = JsonConvert.SerializeObject(net2);
    Assert.AreEqual(s1, s2);
}
public TestImageProvider<string> CreateTestImageProvider(ClassificationNetwork network)
{
    Shape shape = network.InputShape;

    return TestImageProvider<string>.CreateFromJson(
        0,
        2 * shape.GetAxis(Axis.Y),
        network.Classes,
        network.BlankClass,
        this.Configuration.DataProvider);
}
public void SaveToFileTest()
{
    string tempFileName = Path.GetTempFileName();
    try
    {
        this.net.SaveToFile(tempFileName, NetworkFileFormat.JSON);
        ClassificationNetwork net2 = ClassificationNetwork.FromFile(tempFileName);
        Assert.AreEqual(JsonConvert.SerializeObject(this.net), JsonConvert.SerializeObject(net2));
    }
    finally
    {
        File.Delete(tempFileName);
    }
}
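// A minimal persistence round-trip sketch distilled from the three tests above.
// It relies only on members those tests already exercise (SaveToString/FromString
// with NetworkFileFormat.JSON); the helper name itself is hypothetical.
private static ClassificationNetwork RoundTripThroughJson(ClassificationNetwork net)
{
    // the byte[] (SaveToMemory/FromMemory) and file (SaveToFile/FromFile)
    // overloads follow the same save-then-restore pattern
    string json = net.SaveToString(NetworkFileFormat.JSON);
    return ClassificationNetwork.FromString(json);
}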
public void ForwardToProbability()
{
    ClassificationNetwork network = ClassificationNetwork.FromArchitecture("1x1x2~5N~5N~3N", this.classes);

    Tensor x = new Tensor(null, new Shape(Shape.BWHC, 1, 1, 1, 2));
    x.Set(new float[] { 0.2f, -0.3f });

    Tensor probability = network.Forward(null, x);

    // the network has three output classes
    Assert.AreEqual(3, probability.Length);

    // each class probability must lie strictly between 0 and 1
    for (int i = 0; i < probability.Length; i++)
    {
        Assert.IsTrue(probability.Weights[i] > 0.0);
        Assert.IsTrue(probability.Weights[i] < 1.0);
    }

    // the class probabilities must sum to 1
    Assert.AreEqual(1.0, probability.Weights[0] + probability.Weights[1] + probability.Weights[2], 1e-6);
}
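// A small helper sketch (hypothetical name) that factors out the checks above:
// every class probability lies strictly in (0, 1) and the values sum to 1
// within tolerance, which is what the test expects from the network's output.
private static void AssertIsProbabilityDistribution(Tensor probability, double tolerance = 1e-6)
{
    double sum = 0.0;
    for (int i = 0; i < probability.Length; i++)
    {
        Assert.IsTrue(probability.Weights[i] > 0.0);
        Assert.IsTrue(probability.Weights[i] < 1.0);
        sum += probability.Weights[i];
    }

    Assert.AreEqual(1.0, sum, tolerance);
}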
public void ForwardVolumes()
{
    Random random = new Random(0);

    ClassificationNetworkTrainer trainer = new ClassificationNetworkTrainer();
    SGD sgd = new SGD()
    {
        LearningRate = 0.0001f,
        Momentum = 0.0f,
    };

    ClassificationNetwork network = ClassificationNetwork.FromArchitecture("1x1x2~5N~5N~3N", this.classes);

    // let's test 100 random point-and-label settings
    // note that this should work since L2 and L1 regularization are off
    // an issue is that if the step size is too high, this could technically fail...
    for (int k = 0; k < 100; k++)
    {
        int gti = (int)Math.Floor(random.NextDouble() * 3);

        Tensor x = new Tensor(null, new Shape(Shape.BWHC, 1, 1, 1, 2));
        x.Set(new float[] { ((float)random.NextDouble() * 2) - 1, ((float)random.NextDouble() * 2) - 1 });

        Tensor pv = network.Forward(null, x).Clone() as Tensor;

        trainer.RunEpoch(
            k,
            network,
            Enumerable.Repeat((x, new string[] { this.classes[gti] }), 1),
            sgd,
            new LogLikelihoodLoss(),
            CancellationToken.None);

        Tensor pv2 = network.Forward(null, x).Clone() as Tensor;

        Assert.IsTrue(
            pv2.Weights[gti] > pv.Weights[gti],
            "k: {0}, gti: {1}, pv2[gti]: {2}, pv[gti]: {3}",
            k,
            gti,
            pv2.Weights[gti],
            pv.Weights[gti]);
    }
}
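// A sketch of the invariant the loop above relies on: with L1/L2 regularization
// off and a small learning rate, one SGD epoch on a single labeled sample should
// raise the probability assigned to that label. The helper name is hypothetical.
private static bool GroundTruthProbabilityImproved(Tensor before, Tensor after, int classIndex)
{
    return after.Weights[classIndex] > before.Weights[classIndex];
}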
private void Test()
{
    ClassificationNetwork network = ClassificationNetwork.FromFile(this.options.NetworkFileName);

    List<ClassificationResult<string>> results = new List<ClassificationResult<string>>();

    using (TestImageProvider<string> dataProvider = this.options.CreateTestImageProvider(network))
    {
        ////Context model = Context.FromRegex(@"\d{1,5}", CultureInfo.InvariantCulture);

        ////int n = 0;
        foreach (TestImage sample in dataProvider.Generate(network.AllowedClasses))
        {
            Interlocked.Increment(ref this.totalImages);

            ////sample.Image.Save("e:\\temp\\" + sample.Label + "_" + n.ToString(CultureInfo.InvariantCulture) + ".bmp");
            ////n++;
            ////if (n < 171) continue;

            this.localTimeCounter.Restart();

            Tensor x = ImageExtensions.FromImage(
                sample.Image,
                null,
                Shape.BWHC,
                network.InputShape.GetAxis(Axis.X),
                network.InputShape.GetAxis(Axis.Y));

            IList<IList<(string Answer, float Probability)>> answers = network.Execute(x).Answers;
            ////(IList<(string Answer, float Probability)> answers, _) = network.ExecuteSequence(x, model);

            this.localTimeCounter.Stop();
            long duration = this.localTimeCounter.ElapsedMilliseconds;

            foreach (IList<(string answer, float probability)> answer in answers)
            {
                string text = answer.FirstOrDefault().answer;
                float prob = answer.FirstOrDefault().probability;

                results.Add(new ClassificationResult<string>(
                    sample.SourceId,
                    text,
                    string.Concat(sample.Labels),
                    prob,
                    prob >= 0.38f));

                this.WriteLine(
                    null,
                    "({0})\tFile: {1} ... OK ({2} ms) {3} {4:F4}",
                    this.totalImages,
                    sample.SourceId.ToFileName(false),
                    duration,
                    text,
                    prob);
            }

            /*string answer = answers.Last().FirstOrDefault()?.Answer;
            int prob = (int)(((answers.Last().FirstOrDefault()?.Probability ?? 0.0f) * 100) + 0.5f);

            results.Add(new ClassificationResult<string>(
                sample.SourceId,
                answer,
                string.Concat(sample.Labels),
                prob,
                prob >= 0.38f));

            ////this.Write(".");
            this.Write(
                null,
                "({0})\tFile: {1} ... OK ({4} ms) {2} {3:F4}",
                this.totalImages,
                sample.SourceId.ToFileName(false),
                duration,
                answer,
                prob);*/
        }
    }

    // write report
    ClassificationReport<string> testReport = new ClassificationReport<string>(results);
    using (StreamWriter outputFile = File.CreateText(this.options.OutputFileName))
    {
        ClassificationReportWriter<string>.WriteReport(outputFile, testReport, ClassificationReportMode.All);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="TextReader"/> class.
/// </summary>
/// <param name="fileName">A string that contains the name of the file from which to create the reader's <see cref="ClassificationNetwork"/>.</param>
public TextReader(string fileName)
{
    this.network = ClassificationNetwork.FromFile(fileName);
}
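// A minimal usage sketch for the constructor above; the file name is a
// hypothetical placeholder for a network saved in NetworkFileFormat.JSON
// (for example, by ClassificationNetwork.SaveToFile).
//
//   TextReader reader = new TextReader(@"networks\digits.json");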
/// <summary>
/// Performs one epoch of the SGD algorithm.
/// </summary>
/// <param name="epoch">The zero-based index of the learning epoch.</param>
/// <param name="net">The network to train.</param>
/// <param name="input">The sequence of learning samples.</param>
/// <param name="algorithm">The training algorithm.</param>
/// <param name="lossFunction">The loss function.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>
/// The <see cref="TrainingResult"/> object that contains training results for the epoch.
/// </returns>
public TrainingResult RunEpoch(
    int epoch,
    ClassificationNetwork net,
    IEnumerable<(Tensor Data, string[] Label)> input,
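// The remaining parameters follow the XML comment above (algorithm, loss
// function, cancellation token). A usage sketch assembled from the call in
// ForwardVolumes above; the network, input tensor x, and class label "0"
// are assumptions reused from that test.
//
//   ClassificationNetworkTrainer trainer = new ClassificationNetworkTrainer();
//   TrainingResult result = trainer.RunEpoch(
//       0,                                               // first epoch
//       network,
//       Enumerable.Repeat((x, new string[] { "0" }), 1), // a single labeled sample
//       new SGD() { LearningRate = 0.0001f, Momentum = 0.0f },
//       new LogLikelihoodLoss(),
//       CancellationToken.None);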
private void Learn(int taskIndex, LearningTask task, CancellationToken cancellationToken)
{
    using (StreamWriter logFile = File.CreateText(task.LogFileName))
    {
        logFile.AutoFlush = true;

        try
        {
            // report starting time
            DateTime dateStarted = DateTime.Now;
            this.WriteLine(logFile, string.Format(CultureInfo.InvariantCulture, "Started: {0}", dateStarted.ToString("G", CultureInfo.InvariantCulture)));

            // load an existing network or create a new one from an architecture string
            ClassificationNetwork net = File.Exists(task.Architecture) ?
                ClassificationNetwork.FromFile(task.Architecture) :
                ClassificationNetwork.FromArchitecture(task.Architecture, task.Classes, task.Classes, task.BlankClass);

            // learning
            Learn();
            net.SaveToFile(task.OutputFileName, NetworkFileFormat.JSON);

            // report finish time and processing interval
            DateTime dateFinished = DateTime.Now;
            this.WriteLine(logFile, string.Empty);
            this.WriteLine(logFile, string.Format(CultureInfo.InvariantCulture, "Finished: {0:G}", dateFinished));
            this.WriteLine(logFile, string.Format(CultureInfo.InvariantCulture, "Total time: {0:g}", TimeSpan.FromSeconds((dateFinished - dateStarted).TotalSeconds)));

            void Learn()
            {
                this.WriteLine(logFile, "Learning...");

                ImageDistortion filter = new ImageDistortion();
                Stopwatch timer = new Stopwatch();

                this.WriteLine(logFile, " Epochs: {0}", task.Epochs);
                this.WriteTrainerParameters(logFile, task.Trainer, task.Algorithm, task.Loss);
                this.WriteLine(logFile, "Image distortion:");
                this.WriteLine(logFile, " Shift: {0}", task.Shift);
                this.WriteLine(logFile, " Rotate: {0}", task.Rotate);
                this.WriteLine(logFile, " Scale: {0}", task.Scale);
                this.WriteLine(logFile, " Crop: {0}", task.Crop);

                Shape shape = net.InputShape;

                using (TestImageProvider<string> dataProvider = task.CreateDataProvider(net))
                {
                    using (TestImageProvider<string> testDataProvider = task.CreateTestDataProvider(net))
                    {
                        ////int n = 0;
                        for (int epoch = 0; epoch < task.Epochs; epoch++)
                        {
                            // run learning
                            timer.Restart();

                            TrainingResult result = task.Trainer.RunEpoch(
                                epoch,
                                net,
                                GenerateLearnSamples(dataProvider, epoch),
                                task.Algorithm,
                                task.Loss,
                                cancellationToken);

                            timer.Stop();

                            lock (this.logLocker)
                            {
                                string s = string.Format(
                                    CultureInfo.InvariantCulture,
                                    "Net: {0}, Epoch: {1}, Time: {2} ms, {3}",
                                    taskIndex,
                                    epoch,
                                    timer.ElapsedMilliseconds,
                                    result);

                                this.Write(logFile, s);
                                ////this.WriteDebugInformation(logFile);
                                this.WriteLine(logFile, string.Empty);
                            }

                            // save the network snapshot for this epoch
                            string epochOutputFileName = string.Format(CultureInfo.InvariantCulture, task.EpochFileNameTemplate, epoch);
                            net.SaveToFile(epochOutputFileName, NetworkFileFormat.JSON);

                            // run testing
                            List<ClassificationResult<string>> results = new List<ClassificationResult<string>>();

                            if (task.Loss is CTCLoss)
                            {
                                Context model = Context.FromRegex(@"\d", CultureInfo.InvariantCulture);

                                foreach ((TestImage image, string[] labels) in GenerateTestSamples(testDataProvider))
                                {
                                    if (image.Image.IsAllWhite())
                                    {
                                        results.Add(new ClassificationResult<string>(
                                            image.SourceId,
                                            "0",
                                            string.Concat(labels),
                                            1.0f,
                                            true));
                                    }
                                    else
                                    {
                                        Tensor x = ImageExtensions.FromImage(image.Image, null, Shape.BWHC, shape.GetAxis(Axis.X), shape.GetAxis(Axis.Y));
                                        (string text, float prob) = net.ExecuteSequence(x, model).Answers.FirstOrDefault();

                                        results.Add(new ClassificationResult<string>(
                                            image.SourceId,
                                            text,
                                            string.Concat(labels),
                                            prob,
                                            prob >= 0.38f));
                                    }
                                }
                            }
                            else
                            {
                                foreach ((TestImage image, string[] labels) in GenerateTestSamples(testDataProvider))
                                {
                                    if (image.Image.IsAllWhite())
                                    {
                                        results.Add(new ClassificationResult<string>(
                                            image.SourceId,
                                            "0",
                                            string.Concat(labels),
                                            1.0f,
                                            true));
                                    }
                                    else
                                    {
                                        Tensor x = ImageExtensions.FromImage(image.Image, null, Shape.BWHC, shape.GetAxis(Axis.X), shape.GetAxis(Axis.Y));

                                        foreach (IList<(string answer, float probability)> answer in net.Execute(x).Answers)
                                        {
                                            string text = answer.FirstOrDefault().answer;
                                            float prob = answer.FirstOrDefault().probability;

                                            results.Add(new ClassificationResult<string>(
                                                image.SourceId,
                                                text,
                                                string.Concat(labels),
                                                prob,
                                                prob >= 0.38f));
                                        }
                                    }
                                }
                            }

                            // write report
                            ClassificationReport<string> testReport = new ClassificationReport<string>(results);
                            this.Write(logFile, ClassificationReportWriter<string>.WriteReport(testReport, ClassificationReportMode.Summary));

                            using (StreamWriter outputFile = File.CreateText(Path.ChangeExtension(epochOutputFileName, ".res")))
                            {
                                ClassificationReportWriter<string>.WriteReport(outputFile, testReport, ClassificationReportMode.All);
                            }
                        }
                    }

                    IEnumerable<(Tensor x, string[] labels)> GenerateLearnSamples(TestImageProvider<string> provider, int epoch)
                    {
                        // skip blank images and expand each sample into a set of distorted copies
                        return GenerateSamples(provider)
                            .Where(x => !x.image.Image.IsAllWhite())
                            .SelectMany(x =>
                            {
                                ////x.Image.Save("e:\\temp\\" + x.Id + "_" + n.ToString(CultureInfo.InvariantCulture) + "_.bmp");
                                return filter
                                    .Distort(
                                        x.image.Image,
                                        shape.GetAxis(Axis.X),
                                        shape.GetAxis(Axis.Y),
                                        task.Shift,
                                        task.Rotate && x.image.FontStyle != FontStyle.Italic,
                                        task.Scale,
                                        task.Crop)
                                    .Select(bitmap =>
                                    {
                                        ////Interlocked.Increment(ref n);
                                        ////bitmap.Save(@"d:\dnn\temp\" + n.ToString(CultureInfo.InvariantCulture) + ".bmp");
                                        ////bitmap.Save(@"d:\dnn\temp\" + n.ToString(CultureInfo.InvariantCulture) + "_" + x.SourceId.Id + ".bmp");
                                        return (ImageExtensions.FromImage(bitmap, null, Shape.BWHC, shape.GetAxis(Axis.X), shape.GetAxis(Axis.Y)), x.labels);
                                    });
                            });
                    }

                    IEnumerable<(TestImage image, string[] labels)> GenerateTestSamples(TestImageProvider<string> provider)
                    {
                        return GenerateSamples(provider)
                            .AsParallel()
                            .AsOrdered()
                            .WithCancellation(cancellationToken)
                            .WithMergeOptions(ParallelMergeOptions.AutoBuffered);
                    }

                    IEnumerable<(TestImage image, string[] labels)> GenerateSamples(TestImageProvider<string> provider)
                    {
                        return provider
                            .Generate(net.AllowedClasses)
                            .Select(x =>
                            {
                                string[] labels = x.Labels;

                                if (!(task.Loss is CTCLoss))
                                {
                                    // replicate a single label to match the batch size of the network output
                                    int b = net.OutputShapes.First().GetAxis(Axis.B);
                                    if (labels.Length == 1 && b > 1)
                                    {
                                        labels = Enumerable.Repeat(labels[0], b).ToArray();
                                    }
                                }

                                return (x, labels);
                            });
                    }
                }
            }
        }
        finally
        {
            logFile.Flush();
        }
    }
}
public void XorTest1()
{
    const int AlphabetSize = 16;
    const int VectorSize = 4;
    const int BatchSize = 3000;
    const int Epochs = 200;
    const int TestBatchSize = 3000;

    Random random = new Random(0);

    string[] classes = Enumerable.Range(0, AlphabetSize).Select(v => v.ToString(CultureInfo.InvariantCulture)).ToArray();
    ClassificationNetwork network = ClassificationNetwork.FromArchitecture("1x1x4~80-80-80-16LSTM", classes);

    float[] vectors = new RandomGeneratorF().Generate(AlphabetSize * VectorSize);

    (Tensor, int[]) createSample(int size)
    {
        Tensor input = new Tensor(null, new[] { size, 1, 1, VectorSize });
        int[] truth = new int[size];

        int v = 0;
        for (int i = 0; i < size; i++)
        {
            v ^= random.Next(0, AlphabetSize);
            Vectors.Copy(VectorSize, vectors, v * VectorSize, input.Weights, i * VectorSize);

            // the label at step i - 1 is the running XOR of the inputs seen so far,
            // i.e. the value encoded by the input at step i
            if (i > 0)
            {
                truth[i - 1] = v;
            }
        }

        return (input, truth);
    }

    // train the network
    Trainer<int[]> trainer = new Trainer<int[]>()
    {
        ClipValue = 2.0f,
    };

    SGD sgd = new SGD();
    ILoss<int[]> loss = new LogLikelihoodLoss();

    for (int epoch = 0; epoch < Epochs; epoch++)
    {
        (Tensor, int[]) sample = createSample(BatchSize);

        TrainingResult result = trainer.RunEpoch(
            network,
            Enumerable.Repeat(sample, 1),
            epoch,
            sgd,
            loss,
            CancellationToken.None);

        Console.WriteLine(result.CostLoss);
    }

    // test the network
    (Tensor x, int[] expected) = createSample(TestBatchSize);
    Tensor y = network.Forward(null, x);
    ////y.Reshape(testBatchSize - 1);
    ////expected.Reshape(testBatchSize - 1);

    float error = loss.Loss(y, expected, false);

    Console.WriteLine(y);
    Console.WriteLine(expected);
    ////Console.WriteLine(y.Axes[1]);
    Console.WriteLine(error);

    ////Assert.IsTrue(errorL1 < 0.01, errorL1.ToString(CultureInfo.InvariantCulture));
}
public void BeforeEach()
{
    this.net = ClassificationNetwork.FromArchitecture("10x10x2~5N~5N~3N", this.classes);
}