public void EqualsTest()
{
    // Build every (a, b) pair with a, b in [0, 10]; the target is 1.0 when the
    // two inputs are equal and 0.0 otherwise.
    var trainingData = new List<TrainSet>();
    for (int a = 0; a <= 10; a++)
    {
        for (int b = 0; b <= 10; b++)
        {
            trainingData.Add(new TrainSet
            {
                Input = new double[] { a, b },
                Output = a == b ? 1.0 : 0.0
            });
        }
    }

    // 2-input / 4-hidden / 1-output network, sigmoid activations throughout.
    var net = new FeedForward(
        new Layer(2, new Sigmoid()),
        new Layer[] { new Layer(4, new Sigmoid()) },
        new Layer(1, new Sigmoid()));
    net.Train(trainingData, 10000, learningRate: 1);

    // After training, the rounded activation must reproduce every label.
    foreach (TrainSet sample in trainingData)
    {
        double prediction = net.Handle(sample.Input)[0];
        double rounded = Math.Round(prediction);
        Assert.AreEqual(sample.Output, rounded);
    }
}
static void Main(string[] args)
{
    // Sanity-check the numerics layer before doing anything else.
    NumericsTest test = new NumericsTest();
    test.Test();

    // Configure log4net from the config file sitting next to the executable.
    var log4NetConfig = Path.Combine(
        Path.GetDirectoryName(typeof(Program).Assembly.Location),
        "log4net.config");
    XmlConfigurator.Configure(new FileInfo(log4NetConfig));

    int batchSize = 32;

    // Training and validation readers over the image folders.
    ReadData rdtrain = new ReadData("data\\train\\", batchSize);
    ReadData rdval = new ReadData("data\\val\\", batchSize);

    Context ctx = new Context(DeviceType.KGpu, 0);

    // OCR network symbol graph sized for this batch.
    var pnet = get_ocrnet(batchSize);

    Speedometer speed = new Speedometer(batchSize, 50);
    CustomMetric customMetric = new CustomMetric((l, p) => Accuracy(l, p, batchSize));

    // NOTE(review): the optimizer uses literal hyperparameters here; the
    // previously declared learning-rate/weight-decay locals were never wired in.
    Optimizer optimizer = new CcSgd(momentum: 0.9f, learning_rate: 0.001f,
        wd: 0.00001f, rescale_grad: 1.0f / batchSize);

    FeedForward model = new FeedForward(pnet, new List<Context> { ctx },
        num_epoch: 10,
        optimizer: optimizer,
        initializer: new Xavier(factor_type: FactorType.In, magnitude: 2.34f));

    model.Fit(rdtrain, rdval, customMetric,
        batch_end_callback: new List<Action<mxnet.csharp.util.BatchEndParam>> { speed.Call });

    Console.WriteLine("");
}
public void DoPredict()
{
    // Restore a previously checkpointed model on the CPU.
    const int batchSize = 16;
    var ctx = new Context(DeviceType.KCpu, 0);

    var optimizer = new CcSgd(momentum: 0.9f, learningRate: 0.001f, wd: 0.00001f,
        rescaleGrad: 1.0f / batchSize);

    var model = FeedForward.Load("checkpoint\\tag",
        ctx: ctx,
        numEpoch: 1,
        optimizer: optimizer,
        initializer: new Xavier(factorType: FactorType.In, magnitude: 2.34f));
}
static void Main(string[] args)
{
    // Assemble a small dense network: 4x1 input -> 8 -> 6 -> 2 output,
    // identity activations throughout, then compile and print a summary.
    var jarvis = new FeedForward("JARVIS");
    jarvis.AddLayer(new InputLayer("Input", new int[] { 4, 1 }));
    jarvis.AddLayer(new DenseLayer("Dense1", new Identity(), 8));
    jarvis.AddLayer(new DenseLayer("Dense2", new Identity(), 6));
    jarvis.AddLayer(new DenseLayer("Output", new Identity(), 2));
    jarvis.CompileModel();
    jarvis.ShowSummary();
}
static void Main(string[] args)
{
    // Run the numerics self-test first.
    var test = new NumericsTest();
    test.Test();

    // Point log4net at the config file next to the executable.
    var configPath = Path.Combine(
        Path.GetDirectoryName(typeof(Program).Assembly.Location),
        "log4net.config");
    XmlConfigurator.Configure(new FileInfo(configPath));

    // Reload the checkpointed CNN and run one prediction pass over the
    // training data, then kick off the full train/test cycle.
    var model = FeedForward.Load("checkpoint\\cnn");
    var rdpredict = new ReadData("data\\train\\", 32, true);
    var testOut = model.Predict(rdpredict, 1);

    TrainTest();
}
private void createNet()
{
    // Layer sizes: [input][hidden...][output]. The input width is every sensor
    // reading plus the fed-back output history.
    int[] layerSizes = new int[_hiddenLayers.Length + 2];
    layerSizes[0] = sensors.data.Length + (NetOutputHistory.Length);
    _hiddenLayers.CopyTo(layerSizes, 1);
    layerSizes[layerSizes.Length - 1] = NetOutputCount;

    Net = new FeedForward(layerSizes);

    // Randomize the freshly built net in place with aggressive mutation settings.
    NeuralNet.Mutators.SelfMutate(new NeuralNet.Net[] { Net }, new NeuralNet.Options()
    {
        { "clone", false },
        { "mutationProbability", 1 },
        { "mutationFactor", 1 },
        { "mutationRange", 1000 },
    });

    // Net fitness is based on amount consumed.
    Net.FitnessEvaluator = n => (double)stats.Consumed;
}
private static void TrainTest()
{
    int batchSize = 32;

    // Training / validation readers over the image folders.
    var rdtrain = new ReadData("data\\train\\", batchSize);
    var rdval = new ReadData("data\\val\\", batchSize);

    var ctx = new Context(DeviceType.KGpu, 0);
    var pnet = get_ocrnet(batchSize);

    // Progress logging, epoch checkpointing, and the per-batch accuracy metric.
    var speed = new Speedometer(batchSize, 50);
    var doCheckpoint = new DoCheckpoint("checkpoint\\cnn");
    var customMetric = new CustomMetric((l, p) => Accuracy(l, p, batchSize), "Accuracy");

    Optimizer optimizer = new CcSgd(momentum: 0.9f, learningRate: 0.001f,
        wd: 0.00001f, rescaleGrad: 1.0f / batchSize);

    var model = new FeedForward(pnet, new List<Context> { ctx },
        numEpoch: 1,
        optimizer: optimizer,
        initializer: new Xavier(factorType: FactorType.In, magnitude: 2.34f));

    model.Fit(rdtrain, rdval, customMetric,
        batchEndCallback: new List<BatchEndDelegate> { speed.Call },
        epochEndCallback: new List<EpochEndDelegate> { doCheckpoint.Call });
    model.Save("checkpoint\\cnn");

    // One prediction pass over the training data as a smoke check.
    var rdpredict = new ReadData("data\\train\\", batchSize, true);
    var testOut = model.Predict(rdpredict, 1);

    Console.WriteLine("");
}
public void XorTest()
{
    // All four XOR truth-table rows, in the same order the net sees them.
    var samples = new List<TrainSet>
    {
        new TrainSet { Input = new double[] { 0.0, 1.0 }, Output = 1.0 },
        new TrainSet { Input = new double[] { 1.0, 0.0 }, Output = 1.0 },
        new TrainSet { Input = new double[] { 0.0, 0.0 }, Output = 0.0 },
        new TrainSet { Input = new double[] { 1.0, 1.0 }, Output = 0.0 },
    };

    // 2-input / 4-hidden / 1-output sigmoid network.
    var net = new FeedForward(
        new Layer(2, new Sigmoid()),
        new Layer[] { new Layer(4, new Sigmoid()) },
        new Layer(1, new Sigmoid()));
    net.Train(samples, 5000);

    // Print the raw activation, assert on the rounded value.
    foreach (TrainSet sample in samples)
    {
        double result = net.Handle(sample.Input)[0];
        double rounded = Math.Round(result);
        Console.WriteLine($"{sample.Input[0]} X {sample.Input[1]} = {result}");
        Assert.AreEqual(sample.Output, rounded);
    }
}
private static void Main(string[] args)
{
    // Build a 2-3-1 fully-connected network with ReLU activations.
    var activation = new Relu();
    var n = new FeedForward();
    n.AddLayer(new NeuronLayer(2, activation));
    n.AddLayer(new NeuronLayer(3, activation));
    n.AddLayer(new NeuronLayer(1, activation));
    n.Construct();
    // (A string dump of the initial outputs was built here and never used;
    // removed as dead code.)

    // XOR training set: four 1x2 input rows and their expected 1x1 outputs.
    var inp = new MatrixFloat[]
    {
        new MatrixFloat(new float[, ] { { 1, 1 } }),
        new MatrixFloat(new float[, ] { { 0, 1 } }),
        new MatrixFloat(new float[, ] { { 1, 0 } }),
        new MatrixFloat(new float[, ] { { 0, 0 } }),
    };
    var expected = new MatrixFloat[]
    {
        new MatrixFloat(new float[, ] { { 0 } }),
        new MatrixFloat(new float[, ] { { 1 } }),
        new MatrixFloat(new float[, ] { { 1 } }),
        new MatrixFloat(new float[, ] { { 0 } })
    };

    // Train for a fixed number of epochs, printing the summed squared error
    // over the four samples after each epoch.
    for (int i = 0; i < 500000; i++)
    {
        var error = 0f;
        for (int k = 0; k < 4; k++)
        {
            // Copy the sample into the input layer.
            var np1 = inp[k];
            for (int j = 0; j < np1.Columns; j++)
            {
                n.Input[j] = np1[0, j];
            }

            var exp = expected[k];
            n.Forward();

            // Accumulate squared error over the output neurons.
            for (int z = 0; z < exp.Columns; z++)
            {
                var tmp = (n.Output[z] - exp[0, z]);
                error += tmp * tmp;
            }

            n.Backward(exp);
            n.Clear();
        }
        Console.WriteLine(error);
    }
    Console.ReadKey();
}
public void TransplantNet(NeuralNet.Net net)
{
    // Adopt the supplied network as this instance's brain.
    Net = (FeedForward)net;

    // Net fitness is based on amount consumed.
    Net.FitnessEvaluator = n => (double)stats.Consumed;
}