/// <summary>
/// Evaluates a network on the full CIFAR-10 set with two-view test-time
/// augmentation (original + horizontally flipped images), averages the two
/// prediction matrices, dumps the result to CSV, and prints top-1 accuracy.
/// </summary>
/// <param name="filename">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file; ignored when null/empty.</param>
private static void test_cifar_csvtrain(string filename, string weightfile)
{
    Network net = Parser.parse_network_cfg(filename);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }

    Data.Data test = Data.Data.load_all_cifar10();

    // First pass: predictions on the original images.
    Matrix pred = Network.network_predict_data(net, test);

    // Mirror every image in place, then predict again on the flipped set.
    for (int i = 0; i < test.X.Rows; ++i)
    {
        Image im = new Image(32, 32, 3, test.X.Vals[i]);
        LoadArgs.flip_image(im);
    }
    Matrix pred2 = Network.network_predict_data(net, test);

    // Average the two views: pred = .5*pred + .5*pred2.
    pred.scale_matrix(.5f);
    pred2.scale_matrix(.5f);
    Matrix.matrix_add_matrix(pred2, pred);

    pred.to_csv();
    // BUG FIX: C-style "%f" inside an interpolated string printed the literal
    // text and silently dropped the accuracy argument.
    Console.Error.Write($"Accuracy: {Matrix.matrix_topk_accuracy(test.Y, pred, 1)}\n");
}
/// <summary>
/// Loads predictions previously written to results/combined.csv and scores
/// them (top-1 accuracy) against the CIFAR-10 test-batch labels.
/// </summary>
private static void eval_cifar_csv()
{
    Data.Data test = Data.Data.load_cifar10_data("Data.Data/cifar/cifar-10-batches-bin/test_batch.bin");

    Matrix pred = new Matrix("results/combined.csv");
    // BUG FIX: "%d %d" / "%f" placeholders do nothing in interpolated strings;
    // the dimension and accuracy arguments were silently ignored.
    Console.Error.Write($"{pred.Rows} {pred.Cols}\n");
    Console.Error.Write($"Accuracy: {Matrix.matrix_topk_accuracy(test.Y, pred, 1)}\n");
}
/// <summary>
/// Trains a classifier on the full CIFAR-10 training set with SGD,
/// checkpointing weights every epoch and a rolling backup every 100 batches.
/// </summary>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
private static void train_cifar(string cfgfile, string weightfile)
{
    float avgLoss = -1;
    string basec = Utils.Basecfg(cfgfile);
    // BUG FIX: "%s" placeholder printed literally and dropped its argument.
    Console.Write($"{basec}\n");

    Network net = Parser.parse_network_cfg(cfgfile);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    string backupDirectory = "/home/pjreddie/backup/";
    int n = 50000;
    // Result unused in the C original too; kept for its side effects (if any).
    Data.Data.get_labels("Data.Data/cifar/labels.txt");
    int epoch = (net.Seen) / n;
    Data.Data train = Data.Data.load_all_cifar10();
    var sw = new Stopwatch();

    while (Network.get_current_batch(net) < net.MaxBatches || net.MaxBatches == 0)
    {
        sw.Reset();
        sw.Start();
        float loss = Network.train_network_sgd(net, train, 1);
        if (avgLoss == -1)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .95f + loss * .05f;
        sw.Stop();
        // BUG FIX: "net.Seen / n" was integer division with an invalid ":.3"
        // specifier; the C original printed (float)seen/n with %.3f.
        Console.Write(
            $"{Network.get_current_batch(net)}, {(float)net.Seen / n:F3}: {loss}, {avgLoss} avg, {Network.get_current_rate(net)} rate, {sw.Elapsed.TotalSeconds} seconds, {net.Seen} images\n");

        if (net.Seen / n > epoch)
        {
            epoch = net.Seen / n;
            string buff = $"{backupDirectory}/{basec}_{epoch}.Weights";
            Parser.save_weights(net, buff);
        }
        if (Network.get_current_batch(net) % 100 == 0)
        {
            string buff = $"{backupDirectory}/{basec}.backup";
            Parser.save_weights(net, buff);
        }
    }

    string buff2 = $"{backupDirectory}/{basec}.Weights";
    Parser.save_weights(net, buff2);
}
/// <summary>
/// Dumps every CIFAR-10 train and test image to disk as a PNG named
/// "&lt;index&gt;_&lt;label&gt;" under Data.Data/cifar/{train,test}.
/// </summary>
private static void extract_cifar()
{
    string[] labels =
    {
        "airplane", "automobile", "bird", "cat", "deer",
        "dog", "frog", "horse", "ship", "truck"
    };
    Data.Data train = Data.Data.load_all_cifar10();
    Data.Data test = Data.Data.load_cifar10_data("Data.Data/cifar/cifar-10-batches-bin/test_batch.bin");

    // The two original loops were identical except for dataset and directory.
    save_cifar_images(train, "Data.Data/cifar/train", labels);
    save_cifar_images(test, "Data.Data/cifar/test", labels);
}

/// <summary>
/// Saves every 32x32x3 image in <paramref name="set"/> as a PNG named
/// "&lt;index&gt;_&lt;label&gt;" in <paramref name="directory"/>, where the
/// label is the arg-max of the image's one-hot class vector.
/// </summary>
private static void save_cifar_images(Data.Data set, string directory, string[] labels)
{
    for (int i = 0; i < set.X.Rows; ++i)
    {
        Image im = new Image(32, 32, 3, set.X.Vals[i]);
        int sclass = Utils.max_index(set.Y.Vals[i], 10);
        string buff = $"{directory}/{i}_{labels[sclass]}";
        LoadArgs.save_image_png(im, buff);
    }
}
/// <summary>
/// Runs a single-pass top-1/top-2 accuracy evaluation on the CIFAR-10
/// test batch and prints top-1 accuracy plus timing.
/// </summary>
/// <param name="filename">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file; ignored when null/empty.</param>
private static void test_cifar(string filename, string weightfile)
{
    Network net = Parser.parse_network_cfg(filename);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }

    var sw = new Stopwatch();
    float avgAcc = 0;
    Data.Data test = Data.Data.load_cifar10_data("Data.Data/cifar/cifar-10-batches-bin/test_batch.bin");

    sw.Start();
    float[] acc = Network.network_accuracies(net, test, 2);
    avgAcc += acc[0];
    sw.Stop();
    // BUG FIX: printf placeholders dropped all arguments; also Elapsed.Seconds
    // wraps at 60 — TotalSeconds matches the original %lf semantics.
    Console.Write($"top1: {avgAcc}, {sw.Elapsed.TotalSeconds} seconds, {test.X.Rows} images\n");
}
/// <summary>
/// Evaluates the CIFAR-10 test batch one image at a time with two-view
/// test-time augmentation (original + flipped), summing the two softmax
/// outputs before taking arg-max; prints a running accuracy per image.
/// </summary>
/// <param name="filename">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file; ignored when null/empty.</param>
private static void test_cifar_multi(string filename, string weightfile)
{
    Network net = Parser.parse_network_cfg(filename);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }
    Network.set_batch_network(net, 1);

    float avgAcc = 0;
    Data.Data test = Data.Data.load_cifar10_data("Data.Data/cifar/cifar-10-batches-bin/test_batch.bin");

    for (int i = 0; i < test.X.Rows; ++i)
    {
        Image im = new Image(32, 32, 3, test.X.Vals[i]);

        // Accumulate predictions for the original and the mirrored image.
        float[] pred = new float[10];
        float[] p = Network.network_predict(net, im.Data);
        Blas.Axpy_cpu(10, 1, p, pred);
        LoadArgs.flip_image(im);
        p = Network.network_predict(net, im.Data);
        Blas.Axpy_cpu(10, 1, p, pred);

        int index = Utils.max_index(pred, 10);
        int sclass = Utils.max_index(test.Y.Vals[i], 10);
        if (index == sclass)
        {
            avgAcc += 1;
        }
        // BUG FIX: printf placeholders dropped the index and accuracy values.
        Console.Write($"{i,4}: {100f * avgAcc / (i + 1):F2}%\n");
    }
}
/// <summary>
/// Trains a writing-detection network on images listed in figures.list,
/// double-buffering data loads on a background thread; checkpoints every
/// 100 batches and at every epoch boundary.
/// </summary>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
private static void train_writing(string cfgfile, string weightfile)
{
    string backupDirectory = "/home/pjreddie/backup/";
    float avgLoss = -1;
    string basec = Utils.Basecfg(cfgfile);
    // BUG FIX: "%s" placeholder printed literally and dropped its argument.
    Console.Write($"{basec}\n");

    Network net = Parser.parse_network_cfg(cfgfile);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    int imgs = net.Batch * net.Subdivisions;
    string[] paths = Data.Data.GetPaths("figures.list");
    var sw = new Stopwatch();
    int n = paths.Length;
    Console.Write($"N: {n}\n");
    Image outf = Network.get_network_image(net);

    Data.Data buffer = new Data.Data();
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.OutW = outf.W;
    args.OutH = outf.H;
    args.Paths = paths;
    args.N = imgs;
    args.M = n;
    args.D = buffer;
    args.Type = DataType.WritingData;

    Thread loadThread = Data.Data.load_data_in_thread(args);
    int epoch = (net.Seen) / n;

    while (Network.get_current_batch(net) < net.MaxBatches || net.MaxBatches == 0)
    {
        sw.Reset();
        sw.Start();
        // Swap in the batch loaded in the background and kick off the next load.
        loadThread.Join();
        var train = buffer;
        loadThread = Data.Data.load_data_in_thread(args);
        sw.Stop();
        Console.Write($"Loaded {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float loss = Network.train_network(net, train);
        if (avgLoss == -1)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .9f + loss * .1f;
        Console.Write($"{Network.get_current_batch(net)}, {(float)(net.Seen) / n:F3}: {loss}, {avgLoss} avg, {Network.get_current_rate(net)} rate, {sw.Elapsed.TotalSeconds} seconds, {net.Seen} images\n");

        if (Network.get_current_batch(net) % 100 == 0)
        {
            string buff = $"{backupDirectory}/{basec}_batch_{Network.get_current_batch(net)}.Weights";
            Parser.save_weights(net, buff);
        }
        if (net.Seen / n > epoch)
        {
            epoch = net.Seen / n;
            string buff = $"{backupDirectory}/{basec}_{epoch}.Weights";
            Parser.save_weights(net, buff);
        }
    }
}
/// <summary>
/// Validates a pairwise-comparison network over the compare.val.list set in
/// 10 splits, double-buffering loads on a background thread. For each of the
/// 20 attributes it counts a pair as correct when the predicted ordering
/// matches the ground-truth ordering (ties in ground truth are skipped).
/// </summary>
/// <param name="filename">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file; ignored when null/empty.</param>
private static void validate_compare(string filename, string weightfile)
{
    int i = 0;
    Network net = Parser.parse_network_cfg(filename);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }

    string[] paths = Data.Data.GetPaths("Data.Data/compare.val.list");
    int n = paths.Length / 2;

    var sw = new Stopwatch();
    int correct = 0;
    int total = 0;
    int splits = 10;
    int num = (i + 1) * n / splits - i * n / splits;

    Data.Data buffer = new Data.Data();
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Paths = paths;
    args.Classes = 20;
    args.N = num;
    args.M = 0;
    args.D = buffer;
    args.Type = DataType.CompareData;

    Thread loadThread = Data.Data.load_data_in_thread(args);
    for (i = 1; i <= splits; ++i)
    {
        sw.Reset();
        sw.Start();
        loadThread.Join();
        var val = buffer;

        num = (i + 1) * n / splits - i * n / splits;
        string[] part = new string[paths.Length - (i * n / splits)];
        Array.Copy(paths, i * n / splits, part, 0, part.Length);
        if (i != splits)
        {
            args.Paths = part;
            loadThread = Data.Data.load_data_in_thread(args);
        }
        sw.Stop();
        // BUG FIX: printf placeholders dropped the arguments ("ini" typo fixed
        // while rewriting the broken format string).
        Console.Write($"Loaded: {val.X.Rows} images in {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        Matrix pred = Network.network_predict_data(net, val);
        for (int j = 0; j < val.Y.Rows; ++j)
        {
            for (int k = 0; k < 20; ++k)
            {
                // Only score pairs whose ground-truth values actually differ.
                if (val.Y.Vals[j][k * 2] != val.Y.Vals[j][k * 2 + 1])
                {
                    ++total;
                    if ((val.Y.Vals[j][k * 2] < val.Y.Vals[j][k * 2 + 1]) == (pred.Vals[j][k * 2] < pred.Vals[j][k * 2 + 1]))
                    {
                        ++correct;
                    }
                }
            }
        }
        sw.Stop();
        Console.Write($"{i}: Acc: {(float)correct / total}, {sw.Elapsed.TotalSeconds} seconds, {val.X.Rows} images\n");
    }
}
/// <summary>
/// Trains a pairwise-comparison network indefinitely (infinite loop; stop
/// externally), double-buffering loads on a background thread. Saves minor
/// checkpoints every 100 iterations and a full checkpoint every epoch,
/// decimating the learning rate every 22 epochs.
/// </summary>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
private static void train_compare(string cfgfile, string weightfile)
{
    float avgLoss = -1;
    string basec = Utils.Basecfg(cfgfile);
    string backupDirectory = "/home/pjreddie/backup/";
    // BUG FIX: "%s" placeholder printed literally and dropped its argument.
    Console.Write($"{basec}\n");

    Network net = Parser.parse_network_cfg(cfgfile);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    int imgs = 1024;
    string[] paths = Data.Data.GetPaths("Data.Data/compare.train.list");
    int n = paths.Length;
    Console.Write($"{n}\n");
    var sw = new Stopwatch();

    Data.Data buffer = new Data.Data();
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Paths = paths;
    args.Classes = 20;
    args.N = imgs;
    args.M = n;
    args.D = buffer;
    args.Type = DataType.CompareData;

    Thread loadThread = Data.Data.load_data_in_thread(args);
    int epoch = net.Seen / n;
    int i = 0;

    while (true)
    {
        ++i;
        sw.Reset();
        sw.Start();
        loadThread.Join();
        Data.Data train = buffer;
        loadThread = Data.Data.load_data_in_thread(args);
        sw.Stop();
        Console.Write($"Loaded: {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float loss = Network.train_network(net, train);
        if (avgLoss == -1)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .9f + loss * .1f;
        sw.Stop();
        Console.Write($"{(float)net.Seen / n:F3}: {loss}, {avgLoss} avg, {sw.Elapsed.TotalSeconds} seconds, {net.Seen} images\n");

        if (i % 100 == 0)
        {
            string buff = $"{backupDirectory}/{basec}_{epoch}_minor_{i}.Weights";
            Parser.save_weights(net, buff);
        }
        if (net.Seen / n > epoch)
        {
            epoch = net.Seen / n;
            i = 0;
            string buff = $"{backupDirectory}/{basec}_{epoch}.Weights";
            Parser.save_weights(net, buff);
            if (epoch % 22 == 0)
            {
                net.LearningRate *= .1f;
            }
        }
    }
    // NOTE: the original had an unreachable loadThread.Join() after the
    // infinite loop (ported from C); it was dead code and has been removed.
}
/// <summary>
/// Validates a classifier over the configured valid list in ~1000-image
/// splits, double-buffering loads on a background thread, and prints
/// cumulative top-1 and top-k accuracy per split.
/// </summary>
/// <param name="datacfg">Path to the data .cfg (labels/valid/classes/top keys).</param>
/// <param name="filename">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file; ignored when null/empty.</param>
private static void validate_classifier_crop(string datacfg, string filename, string weightfile)
{
    int i = 0;
    Network net = Parser.parse_network_cfg(filename);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }

    var options = OptionList.read_data_cfg(datacfg);
    string labelList = OptionList.option_find_str(options, "labels", "Data.Data/labels.list");
    string validList = OptionList.option_find_str(options, "valid", "Data.Data/train.list");
    int classes = OptionList.option_find_int(options, "classes", 2);
    int topk = OptionList.option_find_int(options, "top", 1);

    string[] labels = Data.Data.get_labels(labelList);
    string[] paths = Data.Data.GetPaths(validList);
    int m = paths.Length;

    var sw = new Stopwatch();
    float avgAcc = 0;
    float avgTopk = 0;
    int splits = m / 1000;
    int num = (i + 1) * m / splits - i * m / splits;

    Data.Data buffer = new Data.Data();
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Paths = paths;
    args.Classes = classes;
    args.N = num;
    args.M = 0;
    args.Labels = labels;
    args.D = buffer;
    args.Type = DataType.OldClassificationData;

    Thread loadThread = Data.Data.load_data_in_thread(args);
    for (i = 1; i <= splits; ++i)
    {
        sw.Reset();
        sw.Start();
        loadThread.Join();
        var val = buffer;

        num = (i + 1) * m / splits - i * m / splits;
        string[] part = new string[paths.Length - (i * m / splits)];
        Array.Copy(paths, i * m / splits, part, 0, part.Length);
        if (i != splits)
        {
            args.Paths = part;
            loadThread = Data.Data.load_data_in_thread(args);
        }
        sw.Stop();
        // BUG FIX: printf placeholders dropped the arguments ("ini" typo fixed
        // while rewriting the broken format string).
        Console.Write($"Loaded: {val.X.Rows} images in {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float[] acc = Network.network_accuracies(net, val, topk);
        avgAcc += acc[0];
        avgTopk += acc[1];
        sw.Stop();
        Console.Write($"{i}: top 1: {avgAcc / i}, top {topk}: {avgTopk / i}, {sw.Elapsed.TotalSeconds} seconds, {val.X.Rows} images\n");
    }
}
/// <summary>
/// Runs a classifier over the configured test list one network-batch at a
/// time (loads double-buffered on a background thread) and writes
/// tab-separated per-class predictions for each image path to stdout.
/// </summary>
/// <param name="datacfg">Path to the data .cfg (test/classes keys).</param>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file; ignored when null/empty.</param>
/// <param name="targetLayer">Unused; kept for interface compatibility.</param>
private static void test_classifier(string datacfg, string cfgfile, string weightfile, int targetLayer)
{
    Network net = Parser.parse_network_cfg(cfgfile);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }

    var options = OptionList.read_data_cfg(datacfg);
    string testList = OptionList.option_find_str(options, "test", "Data.Data/test.list");
    int classes = OptionList.option_find_int(options, "classes", 2);
    string[] paths = Data.Data.GetPaths(testList);
    int m = paths.Length;

    var sw = new Stopwatch();
    Data.Data buffer = new Data.Data();
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Paths = paths;
    args.Classes = classes;
    args.N = net.Batch;
    args.M = 0;
    args.Labels = new string[0];
    args.D = buffer;
    args.Type = DataType.OldClassificationData;

    Thread loadThread = Data.Data.load_data_in_thread(args);
    for (int curr = net.Batch; curr < m; curr += net.Batch)
    {
        sw.Reset();
        sw.Start();
        loadThread.Join();
        var val = buffer;

        if (curr < m)
        {
            args.Paths = new string[paths.Length - curr];
            Array.Copy(paths, curr, args.Paths, 0, args.Paths.Length);
            if (curr + net.Batch > m)
            {
                // Last partial batch.
                args.N = m - curr;
            }
            loadThread = Data.Data.load_data_in_thread(args);
        }
        sw.Stop();
        // BUG FIX: printf placeholders dropped the arguments ("ini" typo fixed
        // while rewriting the broken format string).
        Console.Error.Write($"Loaded: {val.X.Rows} images in {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        Matrix pred = Network.network_predict_data(net, val);
        for (int i = 0; i < pred.Rows; ++i)
        {
            Console.Write($"{paths[curr - net.Batch + i]}");
            for (int j = 0; j < pred.Cols; ++j)
            {
                Console.Write($"\t{pred.Vals[i][j]}");
            }
            Console.Write($"\n");
        }
        sw.Stop();
        Console.Error.Write($"{sw.Elapsed.TotalSeconds} seconds, {val.X.Rows} images, {curr} total\n");
    }
}
/// <summary>
/// Trains a classifier, optionally on multiple GPUs, with data loading
/// double-buffered on background threads. Checkpoints weights every epoch
/// and a rolling backup every 100 batches.
/// </summary>
/// <param name="datacfg">Path to the data .cfg (backup/labels/train/classes keys).</param>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
/// <param name="gpus">GPU ids (unused directly here; count drives replication).</param>
/// <param name="ngpus">Number of network replicas to train.</param>
/// <param name="clear">When true, resets each replica's seen-image counter.</param>
private static void train_classifier(string datacfg, string cfgfile, string weightfile, int[] gpus, int ngpus, bool clear)
{
    int i;
    float avgLoss = -1;
    string basec = Utils.Basecfg(cfgfile);
    // BUG FIX: "%s"/"%d" placeholders printed literally and dropped arguments.
    Console.Write($"{basec}\n");
    Console.Write($"{ngpus}\n");

    Network[] nets = new Network[ngpus];
    // Advances the shared RNG as the C original seeded per replica; the value
    // itself was never used, so the unused local has been dropped.
    Utils.Rand.Next();
    for (i = 0; i < ngpus; ++i)
    {
        nets[i] = Parser.parse_network_cfg(cfgfile);
        // BUG FIX: guard was inverted — weights must load when a file IS supplied.
        if (!string.IsNullOrEmpty(weightfile))
        {
            Parser.load_weights(nets[i], weightfile);
        }
        if (clear)
        {
            nets[i].Seen = 0;
        }
        // Scale the rate so the effective per-image rate is unchanged.
        nets[i].LearningRate *= ngpus;
    }
    Network net = nets[0];

    int imgs = net.Batch * net.Subdivisions * ngpus;
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    var options = OptionList.read_data_cfg(datacfg);
    string backupDirectory = OptionList.option_find_str(options, "backup", "/backup/");
    string labelList = OptionList.option_find_str(options, "labels", "Data.Data/labels.list");
    string trainList = OptionList.option_find_str(options, "train", "Data.Data/train.list");
    int classes = OptionList.option_find_int(options, "classes", 2);

    string[] labels = Data.Data.get_labels(labelList);
    string[] paths = Data.Data.GetPaths(trainList);
    Console.Write($"{paths.Length}\n");
    int n = paths.Length;
    var sw = new Stopwatch();

    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Threads = 32;
    args.Hierarchy = net.Hierarchy;
    args.Min = net.MinCrop;
    args.Max = net.MaxCrop;
    args.Angle = net.Angle;
    args.Aspect = net.Aspect;
    args.Exposure = net.Exposure;
    args.Saturation = net.Saturation;
    args.Hue = net.Hue;
    args.Size = net.W;
    args.Paths = paths;
    args.Classes = classes;
    args.N = imgs;
    args.M = n;
    args.Labels = labels;
    args.Type = DataType.ClassificationData;

    Data.Data buffer = new Data.Data();
    args.D = buffer;
    Thread loadThread = Data.Data.load_data(args);

    int epoch = (net.Seen) / n;
    while (Network.get_current_batch(net) < net.MaxBatches || net.MaxBatches == 0)
    {
        sw.Reset();
        sw.Start();
        loadThread.Join();
        Data.Data train = buffer;
        loadThread = Data.Data.load_data(args);
        sw.Stop();
        Console.Write($"Loaded: {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float loss;
        if (ngpus == 1)
        {
            loss = Network.train_network(net, train);
        }
        else
        {
            loss = Network.train_networks(nets, ngpus, train, 4);
        }
        if (avgLoss == -1)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .9f + loss * .1f;
        Console.Write($"{Network.get_current_batch(net)}, {(float)(net.Seen) / n:F3}: {loss}, {avgLoss} avg, {Network.get_current_rate(net)} rate, {sw.Elapsed.TotalSeconds} seconds, {net.Seen} images\n");

        if (net.Seen / n > epoch)
        {
            epoch = net.Seen / n;
            string buff = $"{backupDirectory}/{basec}_{epoch}.Weights";
            Parser.save_weights(net, buff);
        }
        if (Network.get_current_batch(net) % 100 == 0)
        {
            string buff = $"{backupDirectory}/{basec}.backup";
            Parser.save_weights(net, buff);
        }
    }

    string buff2 = $"{backupDirectory}/{basec}.Weights";
    Parser.save_weights(net, buff2);
}
/// <summary>
/// Trains a YOLO detector on the VOC train list with double-buffered region
/// data loading; checkpoints every 1000 iterations (every 100 for the first
/// 1000) and writes final weights when MaxBatches is reached.
/// </summary>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
private static void train_yolo(string cfgfile, string weightfile)
{
    string trainImages = "/Data.Data/voc/train.txt";
    string backupDirectory = "/home/pjreddie/backup/";
    string basec = Utils.Basecfg(cfgfile);
    // BUG FIX: "%s" placeholder printed literally and dropped its argument.
    Console.Write($"{basec}\n");

    float avgLoss = -1;
    Network net = Parser.parse_network_cfg(cfgfile);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    int imgs = net.Batch * net.Subdivisions;
    int i = net.Seen / imgs;

    // Detection geometry comes from the final (detection) layer.
    Data.Data buffer = new Data.Data();
    Layer l = net.Layers[net.N - 1];
    int side = l.Side;
    int classes = l.Classes;
    float jitter = l.Jitter;

    string[] paths = Data.Data.GetPaths(trainImages);
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Paths = paths;
    args.N = imgs;
    args.M = paths.Length;
    args.Classes = classes;
    args.Jitter = jitter;
    args.NumBoxes = side;
    args.D = buffer;
    args.Type = DataType.RegionData;
    args.Angle = net.Angle;
    args.Exposure = net.Exposure;
    args.Saturation = net.Saturation;
    args.Hue = net.Hue;

    Thread loadThread = Data.Data.load_data_in_thread(args);
    var sw = new Stopwatch();

    while (Network.get_current_batch(net) < net.MaxBatches)
    {
        i += 1;
        // BUG FIX: the stopwatch was never reset here, so the "Loaded" time
        // accumulated the previous iteration's training time.
        sw.Reset();
        sw.Start();
        loadThread.Join();
        var train = buffer;
        loadThread = Data.Data.load_data_in_thread(args);
        sw.Stop();
        Console.Write($"Loaded: {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float loss = Network.train_network(net, train);
        if (avgLoss < 0)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .9f + loss * .1f;
        sw.Stop();
        Console.Write($"{i}: {loss}, {avgLoss} avg, {Network.get_current_rate(net)} rate, {sw.Elapsed.TotalSeconds} seconds, {i * imgs} images\n");

        if (i % 1000 == 0 || (i < 1000 && i % 100 == 0))
        {
            string buff = $"{backupDirectory}/{basec}_{i}.Weights";
            Parser.save_weights(net, buff);
        }
    }

    string buff2 = $"{backupDirectory}/{basec}_final.Weights";
    Parser.save_weights(net, buff2);
}
/// <summary>
/// Trains a super-resolution network (4x scale) on the ImageNet train list
/// with double-buffered loading; checkpoints every 1000 iterations, writes a
/// rolling backup every 100, and final weights at MaxBatches.
/// </summary>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
private static void train_super(string cfgfile, string weightfile)
{
    string trainImages = "/Data.Data/imagenet/imagenet1k.train.list";
    string backupDirectory = "/home/pjreddie/backup/";
    string basec = Utils.Basecfg(cfgfile);
    // BUG FIX: "%s" placeholder printed literally and dropped its argument.
    Console.Write($"{basec}\n");

    float avgLoss = -1;
    Network net = Parser.parse_network_cfg(cfgfile);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    int imgs = net.Batch * net.Subdivisions;
    int i = net.Seen / imgs;

    Data.Data buffer = new Data.Data();
    string[] paths = Data.Data.GetPaths(trainImages);
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Scale = 4;
    args.Paths = paths;
    args.N = imgs;
    args.M = paths.Length;
    args.D = buffer;
    args.Type = DataType.SuperData;

    Thread loadThread = Data.Data.load_data_in_thread(args);
    var sw = new Stopwatch();

    //while(i*imgs < N*120){
    while (Network.get_current_batch(net) < net.MaxBatches)
    {
        i += 1;
        // BUG FIX: the stopwatch was never reset here, so the "Loaded" time
        // accumulated the previous iteration's training time.
        sw.Reset();
        sw.Start();
        loadThread.Join();
        var train = buffer;
        loadThread = Data.Data.load_data_in_thread(args);
        sw.Stop();
        Console.Write($"Loaded: {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float loss = Network.train_network(net, train);
        if (avgLoss < 0)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .9f + loss * .1f;
        sw.Stop();
        Console.Write($"{i}: {loss}, {avgLoss} avg, {Network.get_current_rate(net)} rate, {sw.Elapsed.TotalSeconds} seconds, {i * imgs} images\n");

        if (i % 1000 == 0)
        {
            string buffa = $"{backupDirectory}/{basec}_{i}.Weights";
            Parser.save_weights(net, buffa);
        }
        if (i % 100 == 0)
        {
            string buffb = $"{backupDirectory}/{basec}.backup";
            Parser.save_weights(net, buffb);
        }
    }

    string buff = $"{backupDirectory}/{basec}_final.Weights";
    Parser.save_weights(net, buff);
}
/// <summary>
/// Trains a tag classifier (one output per tag) on /home/pjreddie/tag/train.list
/// with double-buffered loading; checkpoints every epoch, a rolling backup
/// every 100 batches, and final weights when the loop exits.
/// </summary>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
/// <param name="clear">When true, resets the network's seen-image counter.</param>
private static void train_tag(string cfgfile, string weightfile, bool clear)
{
    float avgLoss = -1;
    string basec = Utils.Basecfg(cfgfile);
    string backupDirectory = "/home/pjreddie/backup/";
    // BUG FIX: "%s" placeholder printed literally and dropped its argument.
    Console.Write($"{basec}\n");

    Network net = Parser.parse_network_cfg(cfgfile);
    // BUG FIX: guard was inverted — weights must load when a file IS supplied.
    if (!string.IsNullOrEmpty(weightfile))
    {
        Parser.load_weights(net, weightfile);
    }
    if (clear)
    {
        net.Seen = 0;
    }
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    int imgs = 1024;
    string[] paths = Data.Data.GetPaths("/home/pjreddie/tag/train.list");
    Console.Write($"{paths.Length}\n");
    int n = paths.Length;
    var sw = new Stopwatch();

    Data.Data buffer = new Data.Data();
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Min = net.W;
    args.Max = net.MaxCrop;
    args.Size = net.W;
    args.Paths = paths;
    // One class per network output (one-vs-all tags).
    args.Classes = net.Outputs;
    args.N = imgs;
    args.M = n;
    args.D = buffer;
    args.Type = DataType.TagData;
    args.Angle = net.Angle;
    args.Exposure = net.Exposure;
    args.Saturation = net.Saturation;
    args.Hue = net.Hue;
    Console.Error.Write($"{net.Outputs} classes\n");

    Thread loadThread = Data.Data.load_data_in_thread(args);
    int epoch = (net.Seen) / n;

    while (Network.get_current_batch(net) < net.MaxBatches || net.MaxBatches == 0)
    {
        sw.Reset();
        sw.Start();
        loadThread.Join();
        Data.Data train = buffer;
        loadThread = Data.Data.load_data_in_thread(args);
        sw.Stop();
        Console.Write($"Loaded: {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float loss = Network.train_network(net, train);
        if (avgLoss == -1)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .9f + loss * .1f;
        Console.Write($"{Network.get_current_batch(net)}, {(float)(net.Seen) / n:F3}: {loss}, {avgLoss} avg, {Network.get_current_rate(net)} rate, {sw.Elapsed.TotalSeconds} seconds, {net.Seen} images\n");

        if (net.Seen / n > epoch)
        {
            epoch = net.Seen / n;
            string buff = $"{backupDirectory}/{basec}_{epoch}.Weights";
            Parser.save_weights(net, buff);
        }
        if (Network.get_current_batch(net) % 100 == 0)
        {
            string buff = $"{backupDirectory}/{basec}.backup";
            Parser.save_weights(net, buff);
        }
    }

    string buff2 = $"{backupDirectory}/{basec}.Weights";
    Parser.save_weights(net, buff2);
    // Wait for the in-flight background load before returning.
    loadThread.Join();
}
/// <summary>
/// Trains an object detector on the configured train list with
/// double-buffered loading. When the final layer enables random resizing,
/// the network input is resized to a random multiple of 32 every 10 batches
/// (pinned to 544 near the end). Checkpoints every 1000 iterations (every
/// 100 for the first 1000) and writes final weights at MaxBatches.
/// </summary>
/// <param name="datacfg">Path to the data .cfg (train/backup keys).</param>
/// <param name="cfgfile">Path to the network .cfg file.</param>
/// <param name="weightfile">Optional weights file to resume from; ignored when null/empty.</param>
/// <param name="clear">When true, resets the network's seen-image counter.</param>
private static void train_detector(string datacfg, string cfgfile, string weightfile, bool clear)
{
    var options = OptionList.read_data_cfg(datacfg);
    string trainImages = OptionList.option_find_str(options, "train", "Data.Data/train.list");
    string backupDirectory = OptionList.option_find_str(options, "backup", "/backup/");
    string basec = Utils.Basecfg(cfgfile);
    // BUG FIX: "%s" placeholder printed literally and dropped its argument.
    Console.Write($"{basec}\n");

    float avgLoss = -1;
    // Single-replica array kept to mirror the multi-GPU code path it was
    // ported from; resize_network below still iterates it.
    Network[] nets = new Network[1];
    // Advances the shared RNG as the C original seeded per replica; the value
    // itself was never used, so the unused local has been dropped.
    Utils.Rand.Next();
    int i;
    for (i = 0; i < 1; ++i)
    {
        nets[i] = Parser.parse_network_cfg(cfgfile);
        // BUG FIX: guard was inverted — weights must load when a file IS supplied.
        if (!string.IsNullOrEmpty(weightfile))
        {
            Parser.load_weights(nets[i], weightfile);
        }
        if (clear)
        {
            nets[i].Seen = 0;
        }
        nets[i].LearningRate *= 1;
    }
    Network net = nets[0];

    int imgs = net.Batch * net.Subdivisions * 1;
    Console.Write($"Learning Rate: {net.LearningRate}, Momentum: {net.Momentum}, Decay: {net.Decay}\n");

    // Detection geometry comes from the final (detection) layer.
    Data.Data buffer = new Data.Data();
    Layer l = net.Layers[net.N - 1];
    int classes = l.Classes;
    float jitter = l.Jitter;

    string[] paths = Data.Data.GetPaths(trainImages);
    LoadArgs args = new LoadArgs();
    args.W = net.W;
    args.H = net.H;
    args.Paths = paths;
    args.N = imgs;
    args.M = paths.Length;
    args.Classes = classes;
    args.Jitter = jitter;
    args.NumBoxes = l.MaxBoxes;
    args.D = buffer;
    args.Type = DataType.DetectionData;
    args.Threads = 8;
    args.Angle = net.Angle;
    args.Exposure = net.Exposure;
    args.Saturation = net.Saturation;
    args.Hue = net.Hue;

    Thread loadThread = Data.Data.load_data(args);
    var sw = new Stopwatch();
    int count = 0;

    while (Network.get_current_batch(net) < net.MaxBatches)
    {
        if (l.Random && count++ % 10 == 0)
        {
            Console.Write($"Resizing\n");
            // Random input size in [320, 608], a multiple of 32; pin to 544
            // for the last ~100 batches.
            int dim = (Utils.Rand.Next() % 10 + 10) * 32;
            if (Network.get_current_batch(net) + 100 > net.MaxBatches)
            {
                dim = 544;
            }
            Console.Write($"{dim}\n");
            args.W = dim;
            args.H = dim;
            loadThread.Join();
            loadThread = Data.Data.load_data(args);
            for (i = 0; i < 1; ++i)
            {
                Network.resize_network(nets[i], dim, dim);
            }
            net = nets[0];
        }

        // BUG FIX: the stopwatch was never reset here, so the "Loaded" time
        // accumulated the previous iteration's training time.
        sw.Reset();
        sw.Start();
        loadThread.Join();
        Data.Data train = buffer;
        loadThread = Data.Data.load_data(args);
        sw.Stop();
        Console.Write($"Loaded: {sw.Elapsed.TotalSeconds} seconds\n");

        sw.Reset();
        sw.Start();
        float loss = Network.train_network(net, train);
        if (avgLoss < 0)
        {
            avgLoss = loss;
        }
        avgLoss = avgLoss * .9f + loss * .1f;
        i = Network.get_current_batch(net);
        sw.Stop();
        Console.Write($"{Network.get_current_batch(net)}: {loss}, {avgLoss} avg, {Network.get_current_rate(net)} rate, {sw.Elapsed.TotalSeconds} seconds, {i * imgs} images\n");

        if (i % 1000 == 0 || (i < 1000 && i % 100 == 0))
        {
            string buffa = $"{backupDirectory}/{basec}_{i}.Weights";
            Parser.save_weights(net, buffa);
        }
    }

    string buff = $"{backupDirectory}/{basec}_final.Weights";
    Parser.save_weights(net, buff);
}