private void Go()
{
    var lijstje = DataSetGenerator.Generate();
    int size = 500;

    // Render the raw data set once so the input distribution can be inspected.
    var img = new Image<Rgba32>(size, size);
    MLImager.AddPointsToImage(img, lijstje);
    using (var fs = new FileStream("output.png", FileMode.Create, FileAccess.Write, FileShare.Read))
    {
        img.SaveAsPng(fs);
    }

    // Shuffle, then split the points 50/50 into test and training data.
    lijstje.Shuffle();
    var testData = lijstje.Take(lijstje.Count / 2).ToList();
    var trainData = lijstje.Skip(lijstje.Count / 2).ToList();

    // Enable only the raw x and y features.
    foreach (var inp in INPUTS.INPUTSDICT)
    {
        state[inp.Key] = false;
    }
    state["x"] = true;
    state["y"] = true;
    //state["xTimesY"] = true;

    //var networkShape = new List<int>() { 2, 1, 1 };
    var networkShape = new List<int>() { 2, 8, 8, 8, 8, 8, 8, 1 };
    var inputIds = new List<string>() { "x", "y" };
    var network = NetworkBuilder.BuildNetwork(networkShape, Activations.TANH, Activations.TANH, null, inputIds, false);

    var w = Stopwatch.StartNew();
    for (int i = 0; i < 100000; i++)
    {
        int pointIndex = 0;
        foreach (var trainPoint in trainData)
        {
            var input = MLHelper.ConstructInput(state, trainPoint.X, trainPoint.Y);
            NetworkBuilder.ForwardProp(network, input);
            NetworkBuilder.BackProp(network, trainPoint.Label, Errors.SQUARE);

            // Apply the accumulated gradients after every batchSize training points
            // (the counter must track points, not the epoch index i).
            pointIndex++;
            if (pointIndex % batchSize == 0)
            {
                NetworkBuilder.UpdateWeights(network, learningrate, regularizationRate);
            }
        }

        lossTrain = MLHelper.GetLoss(network, state, trainData);
        lossTest = MLHelper.GetLoss(network, state, testData);
        Console.WriteLine($"{i}: LossTrain: {lossTrain} LossTest: {lossTest}");

        // Roughly once per second, render the current decision surface with the data points on top.
        if (w.Elapsed.TotalSeconds > 1)
        {
            var img2 = MLImager.GenerateImage(network, state, size);
            MLImager.AddPointsToImage(img2, lijstje);
            try
            {
                using (var fs = new FileStream("output2.png", FileMode.Create, FileAccess.Write, FileShare.Read))
                {
                    img2.SaveAsPng(fs);
                }
            }
            catch (Exception)
            {
                // Ignore write failures (e.g. the file is locked by an image viewer); retry on the next pass.
            }
            w.Restart();
        }

        //for (int y = 1; y < network.Count; y++)
        //{
        //    var layer = network[y];
        //    Console.WriteLine(y);
        //    foreach (var node in layer)
        //    {
        //        var sb = new StringBuilder();
        //        foreach (var link in node.InputLinks)
        //        {
        //            sb.Append($"Lnk: {link.Weight} ");
        //        }
        //        Console.WriteLine($"  {sb.ToString()}");
        //    }
        //    Console.WriteLine();
        //}

        //double cccc = 0;
        //foreach (var testPoint in testData)
        //{
        //    var input = ConstructInput(testPoint.X, testPoint.Y);
        //    var result = NetworkBuilder.ForwardProp(network, input);
        //    var res = Math.Abs(result - testPoint.Label);
        //    cccc += res;
        //}
        //Console.WriteLine($"Res: {cccc}");
    }
}
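// The Shuffle() call above is not a BCL method; it comes from an extension method defined elsewhere in
// the project. A minimal Fisher-Yates sketch of what such an extension could look like (the class name
// ListExtensions and the shared Random instance are assumptions for illustration, not the original code;
// requires using System; and using System.Collections.Generic;):
public static class ListExtensions
{
    private static readonly Random Rng = new Random();

    public static void Shuffle<T>(this IList<T> list)
    {
        // Walk backwards, swapping each element with a uniformly chosen earlier (or same) position.
        for (int i = list.Count - 1; i > 0; i--)
        {
            int j = Rng.Next(i + 1); // 0 <= j <= i
            (list[i], list[j]) = (list[j], list[i]);
        }
    }
}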
public static Network BuildNetwork()
{
    var builder = new NetworkBuilder("LeNet")
        .AddInputLayer(new InputLayerVertex("data"))
        // Stem: 7x7/2 convolution with ReLU, batch norm and scale, followed by 3x3/2 max pooling.
        .AddLayerBlock(
            b => b.AddLayer(new ConvolutionalLayerVertex("conv1", new ConvolutionalLayerParameter(
                    numberOfOutput: 64, kernelSize: 7, padding: 3, stride: 2, biasTerm: true)))
                .AddActivation(new ActivationLayerVertex("conv1_relu"))
                .AddBatchNorm(new BatchNormalizationLayerVertex("bn_conv1"))
                .AddScale(new ScaleLayerVertex("scale_conv1")))
        .AddLayer(new PoolingLayerVertex("pool1", new PoolingLayerParameter(
            PoolingLayerKind.Max, kernelSize: 3, stride: 2)))
        // First residual block (res2a): the left branch is the 1x1 projection shortcut,
        // the right branch is the 1x1 -> 3x3 -> 1x1 bottleneck.
        .AddResidualBlock(
            left: b => b.AddLayerBlock(
                lb => lb.AddLayer(new ConvolutionalLayerVertex("res2a_branch1", new ConvolutionalLayerParameter(
                        numberOfOutput: 256, kernelSize: 1, padding: 0, stride: 1, biasTerm: false)))
                    .AddBatchNorm(new BatchNormalizationLayerVertex("bn2a_branch1"))
                    .AddScale(new ScaleLayerVertex("scale2a_branch1"))),
            right: b => b.AddLayerBlock(
                    lb => lb.AddLayer(new ConvolutionalLayerVertex("res2a_branch2a", new ConvolutionalLayerParameter(
                            numberOfOutput: 64, kernelSize: 1, padding: 0, stride: 1, biasTerm: false)))
                        .AddBatchNorm(new BatchNormalizationLayerVertex("bn2a_branch2a"))
                        .AddScale(new ScaleLayerVertex("scale2a_branch2a"))
                        .AddActivation(new ActivationLayerVertex("res2a_branch2a_relu")))
                .AddLayerBlock(
                    lb => lb.AddLayer(new ConvolutionalLayerVertex("res2a_branch2b", new ConvolutionalLayerParameter(
                            numberOfOutput: 64, kernelSize: 3, padding: 1, stride: 1, biasTerm: false)))
                        .AddBatchNorm(new BatchNormalizationLayerVertex("bn2a_branch2b"))
                        .AddScale(new ScaleLayerVertex("scale2a_branch2b"))
                        .AddActivation(new ActivationLayerVertex("res2a_branch2b_relu")))
                .AddLayerBlock(
                    lb => lb.AddLayer(new ConvolutionalLayerVertex("res2a_branch2c", new ConvolutionalLayerParameter(
                            numberOfOutput: 256, kernelSize: 1, padding: 0, stride: 1, biasTerm: false)))
                        .AddBatchNorm(new BatchNormalizationLayerVertex("bn2a_branch2c"))
                        .AddScale(new ScaleLayerVertex("scale2a_branch2c"))))
        // Merge the two branches with element-wise addition, then apply ReLU.
        .AddEltwise(new EltwiseLayerVertex("res2a"))
        .AddActivation(new ActivationLayerVertex("res2a_relu"));

    return builder.BuildNetwork();
}
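// The spatial sizes produced by the stem can be sanity-checked with the usual formulas: convolution
// output rounds down, Caffe-style pooling output rounds up. A small sketch, assuming a 224x224 input
// (the typical ResNet input; the actual input dimensions and the library's exact rounding rules are not
// shown in this snippet; StemSizeCheck is a hypothetical helper, requires using System;):
public static class StemSizeCheck
{
    // floor((in + 2*pad - kernel) / stride) + 1
    private static int ConvOutput(int input, int kernel, int padding, int stride)
        => (input + 2 * padding - kernel) / stride + 1;

    // ceil((in + 2*pad - kernel) / stride) + 1
    private static int PoolOutput(int input, int kernel, int padding, int stride)
        => (input + 2 * padding - kernel + stride - 1) / stride + 1;

    public static void Print()
    {
        int conv1 = ConvOutput(224, kernel: 7, padding: 3, stride: 2);            // 112
        int pool1 = PoolOutput(conv1, kernel: 3, padding: 0, stride: 2);          // 56
        Console.WriteLine($"conv1: {conv1}x{conv1}  pool1: {pool1}x{pool1}");
    }
}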