public void ActivateTest()
{
    // A 2-2-1 network activated on a valid input should yield exactly one
    // output, and that output should be strictly between the extremes.
    NeuralNetwork nn = new NeuralNetwork(2, 2, 1);

    // Fix: the original asserted AreNotEqual against freshly-allocated
    // double[] instances; under MSTest that compares references and always
    // passes, making the assertions vacuous. Compare the scalar values.
    Assert.AreNotEqual(0d, nn.Activate(new float[] { 0f, 1f })[0]);
    Assert.AreNotEqual(1d, nn.Activate(new float[] { 0f, 1f })[0]);
    Assert.AreEqual(1, nn.Activate(new float[] { 0f, 1f }).Length);
}
static void Main(string[] args)
{
    // Demo: build a small convolutional network, train it on two random
    // 28x28 samples with opposite one-hot labels, then activate both.
    Random rng = new Random(13);
    NeuralNetwork net = new NeuralNetwork(rng, 0.2);

    var firstConv = new ConvolutionLayer(new RectifiedLinearUnit(0.01), 8, 3, 3)
    {
        IsSame = true
    };

    // Encoder/decoder-style stack: conv -> pool, with one UnPooling stage.
    net.AddNewLayer(new Shape(28, 28), firstConv);
    net.AddNewLayer(new MaxPooling(2, 2));
    net.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
    net.AddNewLayer(new MaxPooling(2, 2));
    net.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 32, 3, 3));
    net.AddNewLayer(new UnPooling(2, 2));
    net.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
    net.AddNewLayer(new MaxPooling(2, 2));
    net.AddNewLayer(new Flatten());
    net.AddNewLayer(new FeedForwardLayer(20, new RectifiedLinearUnit(0.01)));
    net.AddNewLayer(new FeedForwardLayer(2, new SoftmaxUnit()));
    Console.WriteLine(net);

    GraphCPU graph = new GraphCPU(false);

    // Two random inputs with opposite one-hot targets.
    NNValue sampleA = NNValue.Random(28, 28, 2, rng);
    NNValue sampleB = NNValue.Random(28, 28, 2, rng);
    NNValue labelA = new NNValue(new double[] { 0, 1 });
    NNValue labelB = new NNValue(new double[] { 1, 0 });

    DataSetNoReccurent dataSet = new DataSetNoReccurent(
        new NNValue[] { sampleA, sampleB },
        new NNValue[] { labelA, labelB },
        new CrossEntropyWithSoftmax());

    TrainerCPU trainer = new TrainerCPU(TrainType.MiniBatch, new Adam())
    {
        BatchSize = 2
    };
    trainer.Train(10000, 0.001, net, dataSet, 2, 0.0001);

    // Inspect the trained network's raw outputs for both samples.
    double[] predictionA = net.Activate(sampleA, graph).DataInTensor;
    double[] predictionB = net.Activate(sampleB, graph).DataInTensor;
}
public void TrainTest()
{
    // Train a 2-2-1 network on the XOR truth table, then check that
    // 0 XOR 1 activates close to 1 (tolerance 0.2).
    NeuralNetwork nn = new NeuralNetwork(2, 2, 1);

    var xorSamples = new[]
    {
        Tuple.Create(new float[] { 1, 0 }, new double[] { 1 }),
        Tuple.Create(new float[] { 1, 1 }, new double[] { 0 }),
        Tuple.Create(new float[] { 0, 1 }, new double[] { 1 }),
        Tuple.Create(new float[] { 0, 0 }, new double[] { 0 })
    };

    nn.Train(xorSamples, 1000, 0.1f);

    Assert.AreEqual(1D, nn.Activate(new float[] { 0f, 1f })[0], 0.2);
}
/// <summary>
/// Autoregressively samples <paramref name="length"/> characters from the
/// network: each step activates on the previous character's field encoding,
/// picks the next character from the output distribution, and feeds it back.
/// '$' in the generated text is rendered as a newline.
/// </summary>
/// <param name="network">Trained character-level network.</param>
/// <param name="start">Seed character for the first step.</param>
/// <param name="length">Number of characters to generate.</param>
/// <returns>The generated string with '$' replaced by '\n'.</returns>
public static string generateOutput(NeuralNetwork network, char start, int length)
{
    Matrix input = new Matrix(FieldLetterTranslator.traslateToField(start));
    Graph g = new Graph(false);
    Random rnd = new Random();
    // Fix: accumulate into a StringBuilder instead of `result += act`,
    // which reallocates the whole string every iteration (O(n^2)).
    var result = new System.Text.StringBuilder(length);
    for (int i = 0; i < length; i++)
    {
        Matrix output = network.Activate(input, g);
        // Fallback character used when sampling from the output vector fails.
        char act = 'a';
        try
        {
            act = FieldLetterTranslator.Letters[Util.PickIndexFromRandomVector(output, rnd)];
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
        // Feed the sampled character back in; a fresh graph is built per step
        // (matches the original behavior).
        input = new Matrix(FieldLetterTranslator.traslateToField(act));
        g = new Graph(false);
        result.Append(act);
    }
    return result.ToString().Replace('$', '\n');
}
// Pushes each input value into the neuron with the matching index,
// then fires one activation pass of the network.
private void activateNeuralNetwork(List <double> nnInputs)
{
    var neuronIndex = 0;
    foreach (var signal in nnInputs)
    {
        brain.PutSignalToNeuron(neuronIndex, signal);
        neuronIndex++;
    }
    brain.Activate();
}
private static void Main(string[] args)
{
    // Load a CSV data set, train a sigmoid network on it, then print how the
    // trained network's outputs compare to the expected values row by row.
    Console.WriteLine("App running");

    const int countOutputs = 4;
    var data = FileReader.Read(@"D:\auto.csv", countOutputs);
    Console.WriteLine("Data from file loaded.");

    var network = new NeuralNetwork(0.01, new SigmoidFunction(), data.Input.GetLength(1), 4, countOutputs);
    Console.WriteLine("Neural network created.");

    var dataSets = data.Input;
    var expectedResults = data.Output;

    // Scale inputs into [0, 1] if any value falls outside that range.
    if (ArrayHelper.Each(dataSets).Any(x => x > 1 || x < 0))
    {
        //scaling data [0...1]
        dataSets = network.Scaling(dataSets);
        Console.WriteLine("Data scaled.");
    }

    Console.WriteLine("Start train...");
    network.Train(expectedResults, dataSets, CountEpoch);
    Console.WriteLine("End train.");

    //Compare data after training
    var results = new List <List <double> >();
    for (var row = 0; row < expectedResults.GetLength(0); row++)
    {
        var input = ArrayHelper.GetRow(dataSets, row).ToList();
        results.Add(network.Activate(input));
    }

    Console.WriteLine("Results:");
    for (var row = 0; row < results.Count; row++)
    {
        for (var col = 0; col < results[row].Count; col++)
        {
            var expected = Math.Round(expectedResults[row, col]);
            var actual = Math.Round(results[row][col], 1);
            Console.WriteLine($"{expected} = {actual}; {expected == actual}");
        }
        Console.WriteLine();
    }

    Console.ReadKey();
}
static void Main(string[] args)
{
    // Console demo: wire ten random-valued transmitters into a network with a
    // single receiver, train it toward an expected output of 100, and run it.
    Console.SetWindowSize(width, height);
    Console.SetBufferSize(width, height + 500);
    Design.Headline(Application.ProductName.Replace('_', ' '));

    const int inputSize = 10;
    var inputs = new BaseTrans[inputSize];
    var nn = new NeuralNetwork();
    var r = new Random();

    for (var i = 0; i < inputSize; i++)
    {
        inputs[i] = new BaseTrans { Value = r.Next(100) };
        nn.AddInput(inputs[i]);
    }

    var r1 = new BaseRec();
    // NOTE(review): r1 is registered as an output twice. A second receiver
    // appears to have been planned and abandoned — confirm the duplicate
    // registration is intentional.
    nn.AddOutput(r1);
    nn.AddOutput(r1);

    nn.Build();

    // Build a learning set with fresh random input values and a fixed target.
    LearningSet ls = nn.GenerateLearningSet();
    foreach (BaseTrans input in inputs)
    {
        ls.GiveInput(input, r.Next(100));
    }
    ls.ExpectOutput(r1, 100);

    nn.Practice(ls);
    nn.Activate();
}
/// <summary>
/// Runs the recurrent network over an input token sequence, then decodes a
/// sequence of the same length by feeding each prediction back as the next
/// input, printing the predicted indices to the console.
/// </summary>
/// <param name="inp">Input token indices to condition the network on.</param>
/// <param name="vocabSize">One-hot vector width (defaults to the original hard-coded 11).</param>
/// <param name="startToken">Index fed in to kick off decoding (defaults to the original hard-coded 10).</param>
public void Forward(int[] inp, int vocabSize = 11, int startToken = 10)
{
    GraphCPU graph = new GraphCPU(false);
    int prediction = startToken;
    network.ResetState();

    // Warm-up pass: advance the recurrent state over the full input sequence,
    // discarding outputs.
    for (int i = 0; i < inp.Length; i++)
    {
        NNValue valueMatrix = new NNValue(DataSetSeq2Seq.GetValue(inp[i], vocabSize));
        network.Activate(valueMatrix, graph);
    }

    // Decode pass: start from the start token and feed each prediction back
    // in, emitting one index per input position.
    for (int i = 0; i < inp.Length; i++)
    {
        NNValue valueMatrix = new NNValue(DataSetSeq2Seq.GetValue(prediction, vocabSize));
        prediction = GetInd(network.Activate(valueMatrix, graph));
        Console.Write(prediction);
    }
    Console.WriteLine();
}
public void Run()
{
    // XOR benchmark: build a 2-8-1 sigmoid network, train it with RMSProp in
    // batch mode for 200 epochs, print the learned outputs, and time the run.
    var stopwatch = Stopwatch.StartNew();

    var net = new NeuralNetwork();
    net.AddLayer("input", new InputLayer(2), BaseLayer.TYPE.INPUT);
    net.AddLayer("hidden", new CoreLayer(8, ACTIVATION.SIGMOID, BaseLayer.TYPE.HIDDEN), BaseLayer.TYPE.HIDDEN);
    net.AddLayer("output", new CoreLayer(1, ACTIVATION.SIGMOID, BaseLayer.TYPE.OUTPUT), BaseLayer.TYPE.OUTPUT);
    net.AddConnection("input", "hidden", Connection.INIT.GLOROT_UNIFORM);
    net.AddConnection("hidden", "output", Connection.INIT.GLOROT_UNIFORM);

    Optimizer opt = new RMSProp(net)
    {
        Alpha = 0.1f
    };
    opt.InitBatchMode(4);

    // The four XOR cases and their targets.
    var inputs = new Vector[4];
    var targets = new Vector[4];
    inputs[0] = Vector.Build(2, new float[] { 0f, 0f });
    inputs[1] = Vector.Build(2, new float[] { 0f, 1f });
    inputs[2] = Vector.Build(2, new float[] { 1f, 0f });
    inputs[3] = Vector.Build(2, new float[] { 1f, 1f });
    targets[0] = Vector.Build(1, new float[] { 0f });
    targets[1] = Vector.Build(1, new float[] { 1f });
    targets[2] = Vector.Build(1, new float[] { 1f });
    targets[3] = Vector.Build(1, new float[] { 0f });

    for (var epoch = 0; epoch < 200; epoch++)
    {
        float epochError = 0;
        for (var i = 0; i < 4; i++)
        {
            epochError += opt.Train(inputs[i], targets[i]);
        }
        Console.WriteLine(epochError);
    }
    Console.WriteLine();

    // Show the trained output for each case, releasing pooled vectors as we go.
    for (var i = 0; i < 4; i++)
    {
        Console.WriteLine(net.Activate(inputs[i])[0]);
        Vector.Release(inputs[i]);
        Vector.Release(targets[i]);
    }

    opt.Dispose();
    Console.Write("Finish ");
    BasePool.Instance.Check();

    stopwatch.Stop();
    Console.WriteLine(stopwatch.ElapsedMilliseconds);
}