/// <summary>
/// Loads network weights from a plain-text file produced by <see cref="SaveWeights"/>:
/// one weight per line, input→hidden block first, then hidden→output.
/// </summary>
/// <param name="t">Network whose weight matrices are overwritten in place.</param>
/// <param name="str">Path of the weights file to read.</param>
public static void LoadWeights(this vnn t, string str)
{
    using (var reader = new StreamReader(str, System.Text.Encoding.ASCII))
    {
        // Weights between input and hidden layer; <= includes the bias row.
        for (int i = 0; i <= t.NInput; i++)
        {
            for (int j = 0; j < t.NHidden; j++)
            {
                // InvariantCulture so files round-trip regardless of the machine's
                // decimal separator (the original parsed with the current culture,
                // which breaks on comma-decimal locales).
                t.GwInputHidden[j, i] = double.Parse(reader.ReadLine(), System.Globalization.CultureInfo.InvariantCulture);
            }
        }
        // Weights between hidden and output layer; <= includes the bias row.
        for (int i = 0; i <= t.NHidden; i++)
        {
            for (int j = 0; j < t.NOutput; j++)
            {
                t.GwHiddenOutput[j, i] = double.Parse(reader.ReadLine(), System.Globalization.CultureInfo.InvariantCulture);
            }
        }
    }
}
/// <summary>
/// Saves network weights to a plain-text file readable by <see cref="LoadWeights"/>:
/// one weight per line, input→hidden block first, then hidden→output.
/// </summary>
/// <param name="t">Network whose weight matrices are serialized.</param>
/// <param name="str">Destination file path (overwritten if it exists).</param>
public static void SaveWeights(this vnn t, string str)
{
    using (var writer = new System.IO.StreamWriter(str, false, System.Text.Encoding.ASCII))
    {
        // Weights between input and hidden layer; <= includes the bias row.
        for (int i = 0; i <= t.NInput; i++)
        {
            for (int j = 0; j < t.NHidden; j++)
            {
                // "R" + InvariantCulture: round-trippable precision and a fixed
                // decimal separator (the original used the default culture/format,
                // which can lose precision and is locale-dependent).
                writer.WriteLine(t.GwInputHidden[j, i].ToString("R", System.Globalization.CultureInfo.InvariantCulture));
            }
        }
        // Weights between hidden and output layer; <= includes the bias row.
        for (int i = 0; i <= t.NHidden; i++)
        {
            for (int j = 0; j < t.NOutput; j++)
            {
                writer.WriteLine(t.GwHiddenOutput[j, i].ToString("R", System.Globalization.CultureInfo.InvariantCulture));
            }
        }
    }
}
/// <summary>
/// Trains a momentum-free trainer on the two-parameter test set for 500 epochs
/// (sample by sample, learning rate 0.1) and prints elapsed time, accuracy at a
/// fixed tolerance, and a random-sample MSE baseline.
/// </summary>
/// <param name="nn">Network to train in place.</param>
public static void simple_portable(vnn nn)
{
    var tr = new trainerNoMomentum(nn);
    var tset = data.DataSets.twoParamTest;
    Stopwatch sw = Stopwatch.StartNew();
    const double ACCRATE = 0.05; // tolerance used when scoring accuracy
    for (int i = 0; i < 500; i++)
    {
        // One full pass over the set, one sample at a time.
        for (int j = 0, to = tset.size; j < to; j++)
        {
            tr.TrainOne(tset.inputs[j], tset.outputs[j], 0.1);
        }
    }
    var acc = tset.getAccuracy(nn, ACCRATE);
    WriteLine("Elapsed = " + sw.ElapsedMilliseconds + " ms.");
    // Format specifier in the interpolation instead of the redundant ToString("N2").
    WriteLine($"Accuracy (+-{ACCRATE}) = {acc:N2}");
    WriteLine("Random MSE = " + tset.getRandomMSE(nn, 500));
}
/// <summary>
/// Builds a fresh network sized for the standard input, with NNHIDEN hidden
/// units and a single output, and randomizes its weights uniformly.
/// </summary>
public static vnn createNewNN()
{
    var network = new vnn(U.InputSize, NNHIDEN, 1);
    addon.RandomizeUniform(network);
    return network;
}
/// <summary>
/// Measures and prints the backpropagation time of the no-momentum trainer on a
/// freshly randomized network of the standard benchmark dimensions.
/// </summary>
static void SimpleVNNBackpropNoMomentumSpeedTest()
{
    // Method group instead of the redundant lambda, consistent with
    // SimpleVNNForwardSpeedTest's use of addon.RandomizeWeights.
    var nn = new vnn(INPUT_SIZE, HIDDEN_SIZE, OUTPUT_SIZE, addon.RandomizeUniform);
    var tr = new trainerNoMomentum(nn);
    WriteLine($"Time: {measureBackpropSpeed(nn, tr)}");
}
// Incremental-memory experiment: each iteration appends a new random one-hot
// vector to the history and retrains the network to auto-encode the ENTIRE
// history (inputs == outputs), until 90% of history is reproduced within 0.1.
// NOTE(review): the training set is rebuilt from the full history every
// iteration, so epochs get progressively more expensive.
public void straight() {
    int size = 25;
    vnn nn = new vnn(size, 500, size, addon.RandomizeWeights);
    var tr = new trainerModern(nn);
    List <double[]> history = new List <double[]>();
    trainingSet tset;
    Stopwatch sw = Stopwatch.StartNew();
    int counter = 0;
    var rand = new Random();
    do {
        // New sample: a one-hot vector with a single 1.0 at a random index.
        var data = new double[size];
        data[rand.Next(0, data.Length)] = 1.0;
        history.Add(data);
        // Auto-encoder style: the network must reproduce its own input.
        tset = new trainingSet(history.ToArray(), history.ToArray());
        tr.TrainEpoch(tset.inputs, tset.outputs, 0.02, 0.9);
        //tr.TrainOne(data, data, 0.1, 0.9);
        counter++;
        // Live progress in the console title: iterations (total samples trained).
        Console.Title = $"{counter} ({counter * tset.size})";
    }
    while (tset.getAccuracy(nn, 0.1) < 0.9);
    WriteLine("Elapsed = " + sw.ElapsedMilliseconds + " ms.");
    WriteLine("MSE = " + tset.getMSE(nn));
    WriteLine("Random MSE = " + tset.getRandomMSE(nn, 500));
    // Interactive check against an (unrelated) analytic answer key.
    nn.TestLoop(new Func <double[], double[]>(arr => new double[] { arr[0] > 0.5 ? 1 : 0, Sin(arr[0] + arr[1]) * arr[0] }));
}
// One backpropagation step over both weight layers. The call order is
// significant: output gradients must be computed before the hidden-output
// weights are updated, and hidden gradients are derived from the (updated)
// hidden-output weights before the input-hidden update.
// NOTE(review): semantics of getOEG/mult/getHEG inferred from names —
// presumably "get output error gradients", "apply scaled weight delta" and
// "get hidden error gradients"; confirm against their definitions.
public unsafe static void backpropagate(vnn NN, double[] outputErrorGradients, double[] hiddenErrorGradients, double[] desiredOutputs, double learningRate) {
    // Output-layer error gradients from the desired vs. actual outputs.
    getOEG(NN.outputNeurons, desiredOutputs, outputErrorGradients);
    // Update hidden→output weights using those gradients.
    mult(NN.wHiddenOutput, NN.hiddenNeurons, outputErrorGradients, learningRate);
    // Propagate the error back to the hidden layer.
    getHEG(NN.wHiddenOutput, NN.hiddenNeurons, outputErrorGradients, hiddenErrorGradients);
    // Update input→hidden weights using the hidden gradients.
    mult(NN.wInputHidden, NN.inputNeurons, hiddenErrorGradients, learningRate);
}
/// <summary>
/// Binds the trainer to a network and allocates gradient and weight-delta
/// buffers sized to its layers.
/// </summary>
/// <param name="net">Network this trainer will update.</param>
public trainerModern(vnn net)
{
    NN = net;
    outputErrorGradients = new double[net.nOutput];
    hiddenErrorGradients = new double[net.nHidden];
    // +1 accounts for the bias neuron in each source layer. Property casing made
    // consistently lowercase, matching the sibling trainer constructor (the
    // original mixed net.NOutput/net.NHidden with net.nHidden/net.nInput).
    deltaHiddenOutput = new double[net.nOutput, net.nHidden + 1];
    deltaInputHidden = new double[net.nHidden, net.nInput + 1];
}
/// <summary>
/// Wraps a network with a save path, a training copy, a no-momentum trainer for
/// that copy, and a live UI slider bound to the learning rate.
/// </summary>
/// <param name="_nn">Network to wrap (kept untouched; training runs on a copy).</param>
/// <param name="_savepath">Path used when persisting the network.</param>
public VNNWrapper(vnn _nn, string _savepath)
{
    savepath = _savepath;
    nn = _nn;
    // Train on a copy so the wrapped original is preserved.
    nncopy = nn.Copy();
    tr = new trainerNoMomentum(nncopy);
    slider = vutils.VSlider.RunAsync();
    // Slider range 0..0.2, step 0.01, starting at the current learning rate.
    manip = slider.AddWatch(setLearningRate, learningRate, 0, 0.2, 0.01);
    manip.Name = "Learning Rate";
}
/// <summary>
/// Validates that the training set's sample widths match the network's input
/// and output layer sizes; throws <see cref="NNSetNotMatch"/> otherwise
/// (true = input mismatch, false = output mismatch).
/// </summary>
public static void check_ex(vnn nn, VNNAddOn.trainingSet tset)
{
    bool inputMismatch = nn.nInput != tset.inputs[0].Length;
    if (inputMismatch)
    {
        throw new NNSetNotMatch(true);
    }

    bool outputMismatch = nn.nOutput != tset.outputs[0].Length;
    if (outputMismatch)
    {
        throw new NNSetNotMatch(false);
    }
}
/// <summary>
/// Smoke test for the random-update trainer: trains on the two-parameter test
/// set until 90% accuracy at tolerance 0.1, then prints elapsed time and MSE.
/// </summary>
void newtraintertest()
{
    var network = new vnn(2, 100, 2, addon.RandomizeWeights);
    var trainer = new data.CustomClasses.randomUpdateTrainer(network, data.DataSets.twoParamTest, 0.1, 0.9);
    var timer = Stopwatch.StartNew();
    //tr.TrainUntilMSE(0.01, 10);
    trainer.TrainUntilAccuracy(0.9, 0.1);
    WriteLine("Elapsed = " + timer.ElapsedMilliseconds);
    WriteLine("MSE = " + trainer.getMSE());
}
/// <summary>
/// Prints the weight-matrix and neuron-array dimensions of a deep network and a
/// classic two-layer network of the same 2-100-2 shape, for side-by-side
/// comparison of their internal layouts.
/// </summary>
public void test_deep_sizes()
{
    var deep = vnnDeep.CreateRandom(rand, 2, 100, 2);
    var flat = new vnn(2, 100, 2, addon.RandomizeUniform);

    // Weight matrix dimensions: input→hidden and hidden→output.
    Console.WriteLine($"nn= wih:{flat.wInputHidden.GetLength(0)}x{flat.wInputHidden.GetLength(1)} who:{flat.wHiddenOutput.GetLength(0)}x{flat.wHiddenOutput.GetLength(1)}");
    Console.WriteLine($"nd= wih:{deep.L[0].GetLength(0)}x{deep.L[0].GetLength(1)} who:{deep.L[1].GetLength(0)}x{deep.L[1].GetLength(1)}");

    // Neuron array lengths per layer.
    Console.WriteLine($"nn= nin:{flat.inputNeurons.Length} nh:{flat.hiddenNeurons.Length} no:{flat.outputNeurons.Length}");
    Console.WriteLine($"nd= nin:{deep.N[0].Length} nh:{deep.N[1].Length} no:{deep.N[2].Length}");

    // Declared layer sizes of the deep network.
    Console.WriteLine($"nd= sin:{deep.size[0]} sh:{deep.size[1]} so:{deep.size[2]}");
    Console.WriteLine($"nd= size:{deep.size.Count}");
}
/// <summary>
/// Trains a 3-element sorting network for 10000 epochs, snapshots it to disk,
/// prints MSE and accuracy, then enters the interactive test loop.
/// </summary>
public sortingTest()
{
    int n = 3;
    var nn = new vnn(n, 100, n);
    var tr = new trainer(nn, data.DataSets.sorting(n), 0.1, 0.9);
    tr.TrainFor(10000);
    // Snapshot goes to the system temp directory; the original wrote to a
    // hard-coded personal OneDrive path that only exists on one machine.
    System.IO.File.WriteAllBytes(System.IO.Path.Combine(System.IO.Path.GetTempPath(), "tempNN"), nn.ToBytes());
    WriteLine("MSE = " + tr.getMSE());
    // Accuracy tolerance shrinks with n so each slot must be distinguishable.
    WriteLine("Acc = " + tr.getAccuracy(1 / (double)n));
    nn.TestLoop();
}
// [TestMethod] intentionally disabled — run manually.
/// <summary>
/// Round-trip experiment (save half currently disabled): restores a network
/// from the bytes at <c>savepath</c> and prints its answer.
/// </summary>
void saveRandomWeights()
{
    var restored = new vnn(File.ReadAllBytes(savepath));
    printAnswer(restored);
}
/// <summary>
/// Binary round-trip check: serializes a small network to bytes, rebuilds a new
/// network from them, and prints the hidden→output report of both for visual
/// comparison. Waits for a key press before returning.
/// </summary>
public void test_binary()
{
    var original = new vnn(2, 5, 2);
    var report = new reporter(original);
    Console.WriteLine(report.wHiddenOutput);

    byte[] serialized = original.ToBytes();
    var restored = new vnn(serialized);
    report = new reporter(restored);
    Console.WriteLine(report.wHiddenOutput);

    Console.ReadKey();
}
/// <summary>
/// Validates that the set matches the network's dimensions, then stores the
/// training configuration and allocates gradient and weight-delta buffers
/// (+1 on the source side of each delta matrix for the bias neuron).
/// </summary>
public trainer(vnn net, trainingSet set, double learning_rate, double moment)
{
    exceptions.NNSetNotMatch.check_ex(net, set);

    NN = net;
    tset = set;
    learningRate = learning_rate;
    momentum = moment;

    outputErrorGradients = new double[net.nOutput];
    hiddenErrorGradients = new double[net.nHidden];
    deltaHiddenOutput = new double[net.nOutput, net.nHidden + 1];
    deltaInputHidden = new double[net.nHidden, net.nInput + 1];
}
/// <summary>
/// Loads a network from a file (tried as-is, then under the standard
/// directory), sets up an obvious-win position between two players, feeds the
/// serialized position through the network and prints A's win estimate as a
/// percentage.
/// </summary>
/// <param name="nnfile">Network file name or full path.</param>
/// <param name="serializerName">Name of the position serializer to build.</param>
public void nn_test(string nnfile, string serializerName = nameof(StdSerializers.AllOneHotSerial))
{
    var path = File.Exists(nnfile) ? nnfile : root.StdDir + nnfile;
    nn = new vnn(File.ReadAllBytes(path));

    getPlayers(out Player A, out Player B, out var data);
    ObviousWin(A, B);

    //double[] inputs = LearningGround.serializer.ConvertPosition(A);
    var serializer = StdSerializers.Util.CreateFromName(serializerName, data);
    var inputs = serializer.ConvertPosition(A);

    var output = nn.feedResult(inputs)[0];
    var formatted = Math.Round(output, 2) * 100;
    Console.WriteLine($"A will win = {formatted}%");
}
/// <summary>
/// Randomizes all weights uniformly in [0, 1/fanIn): input→hidden weights are
/// scaled by 1/nInput, hidden→output by 1/nHidden. The &lt;= loop bound covers
/// the bias row of each source layer.
/// </summary>
public static void StepFull(vnn nn)
{
    for (int src = 0; src <= nn.nInput; src++)
    {
        for (int dst = 0; dst < nn.nHidden; dst++)
        {
            nn.wInputHidden[src, dst] = rand.NextDouble() / nn.nInput;
        }
    }
    for (int src = 0; src <= nn.nHidden; src++)
    {
        for (int dst = 0; dst < nn.nOutput; dst++)
        {
            nn.wHiddenOutput[src, dst] = rand.NextDouble() / nn.nHidden;
        }
    }
}
/// <summary>
/// Randomizes all weights uniformly in (-6, 6) scaled by 1/(fanIn+1) — a
/// sigmoid-friendly range. The &lt;= loop bound covers the bias row of each
/// source layer.
/// </summary>
public static void SigmoidFull(vnn nn)
{
    for (int src = 0; src <= nn.nInput; src++)
    {
        for (int dst = 0; dst < nn.nHidden; dst++)
        {
            nn.wInputHidden[src, dst] = (rand.NextDouble() * 12 - 6) / (nn.nInput + 1);
        }
    }
    for (int src = 0; src <= nn.nHidden; src++)
    {
        for (int dst = 0; dst < nn.nOutput; dst++)
        {
            nn.wHiddenOutput[src, dst] = (rand.NextDouble() * 12 - 6) / (nn.nHidden + 1);
        }
    }
}
/// <summary>
/// Interactive console loop with custom translators: reads a line, converts it
/// to an input vector, feeds the network and prints the translated output.
/// Repeats until the input translator (or EOF) throws, which ends the session.
/// </summary>
/// <param name="nn">Network to query.</param>
/// <param name="input_translator">Parses one console line into an input vector.</param>
/// <param name="output_translator">Renders the output vector for display.</param>
public static void TestLoop(this vnn nn, Func <string, double[]> input_translator, Func <double[], string> output_translator)
{
    try
    {
        while (true)
        {
            double[] inp = new double[nn.NInput];
            Console.Write("in = ");
            inp = input_translator(Console.ReadLine());
            Console.WriteLine();

            var ans = nn.feedResult(inp);
            Console.WriteLine("out = " + output_translator(ans));
            Console.WriteLine("".PadRight(10, '='));
        }
    }
    catch
    {
        // Deliberate: any translation/read failure exits the interactive loop.
    }
}
/// <summary>
/// Times how long training to 80% accuracy on the XOR problem takes, averaged
/// over a configurable number of runs (currently 1), and prints the mean in
/// milliseconds. Waits for a key press before returning.
/// </summary>
static void test_randomizing()
{
    double total = 0.0;
    double runs = 1.0; // number of timed runs to average over
    for (int i = 0; i < runs; i++)
    {
        var nn = new vnn(2, 2, 1);
        var tr = new trainer(nn, data.DataSets.xorProblem, 0.1, 0.9);
        var sw = Stopwatch.StartNew();
        tr.TrainUntilAccuracy(0.8, 0.2, 100);
        total += sw.ElapsedMilliseconds;
    }
    Console.WriteLine(total / runs);
    Console.ReadKey();
}
/// <summary>
/// Builds a network randomized by the given function, feeds it the supplied
/// inputs and shows the output distribution as a histogram (printed and
/// displayed under the given name).
/// </summary>
/// <param name="inputs">Input vector fed through the network.</param>
/// <param name="randFunc">Weight-randomization strategy under test.</param>
/// <param name="name">Label for the displayed histogram window.</param>
unsafe void testWeights(double[] inputs, Action <ISimpleMLP> randFunc, string name)
{
    var nn = new vnn(NINPUTS, NHIDDEN, NOUTPUT, randFunc);
    var result = nn.feedResult(inputs);
    Histogram.PrintHist(result);
    Histogram.ShowHist(result, name: name);
}
// Streams up to n (=3000) logged games and maintains incremental running
// averages as it goes:
//   err     — mean absolute error between network output and logged result
//   corr    — count of outputs within 0.5 of the logged result
//             (NOTE: left as a raw count in this span, not normalized)
//   avg_out — mean raw network output
// NOTE(review): this block appears TRUNCATED in the visible source — the for
// loop and the method body are never closed here; the remainder presumably
// follows outside this view. Left byte-identical for that reason.
static void GetError(FileStream logStream, int len, vnn nn, out double err, out double corr, out double avg_out) { const int n = 3000; err = 0.0; corr = 0.0; avg_out = 0.0; for (int i = 0; i < n; i++) { if (learn.GetNextGame(logStream, len, out var inp, out var res)) { double output = nn.feedResult(inp)[0]; double diff = Math.Abs(res[0] - output); err = (err * i + diff) / (double)(i + 1); avg_out = (avg_out * i + output) / (double)(i + 1); if (diff < 0.5) { corr++; } }
/// <summary>
/// Interactive console loop: prompts for each input component, feeds the
/// network and prints every output rounded to 5 decimals. Repeats until a
/// parse failure or EOF throws, which ends the session.
/// </summary>
public static void TestLoop(this vnn nn)
{
    try
    {
        while (true)
        {
            double[] inp = new double[nn.NInput];
            for (int i = 0; i < nn.NInput; i++)
            {
                System.Console.Write("in[" + i + "] = ");
                inp[i] = double.Parse(System.Console.ReadLine());
            }
            System.Console.WriteLine();

            var ans = nn.feedResult(inp);
            for (int i = 0; i < nn.NOutput; i++)
            {
                Console.WriteLine("ou[" + i + "] = " + Round(ans[i], 5));
            }
            Console.WriteLine("".PadRight(10, '='));
        }
    }
    catch
    {
        // Deliberate: any parse/read failure exits the interactive loop.
    }
}
/// <summary>
/// Interactive console loop with an answer key: reads a line, translates it to
/// an input vector, feeds the network, and prints each output alongside the
/// "correct" value from the answer key and their absolute difference.
/// Repeats until the translator (or EOF) throws, which ends the session.
/// </summary>
/// <param name="nn">Network to query.</param>
/// <param name="answerkey">Produces reference values to compare against. NOTE(review):
/// it is invoked on the network's OUTPUT vector, not the input — confirm intended.</param>
/// <param name="translator">Parses one console line into an input vector.</param>
public static void TestLoop(this vnn nn, Func <double[], double[]> answerkey, Func <string, double[]> translator)
{
    try
    {
        while (true)
        {
            double[] inp = new double[nn.NInput];
            Console.Write("in = ");
            inp = translator(Console.ReadLine());
            Console.WriteLine();

            var ans = nn.feedResult(inp);
            var corr = answerkey(ans);
            for (int i = 0; i < nn.NOutput; i++)
            {
                Console.WriteLine("ou[{0}] = {1:0.000}; co[{0}] = {2:0.000}; dx[{0}] = {3:0.000};", i, ans[i], corr[i], Abs(corr[i] - ans[i]));
            }
            Console.WriteLine("".PadRight(10, '='));
        }
    }
    catch
    {
        // Deliberate: any translation/read failure exits the interactive loop.
    }
}
/// <summary>
/// Trains a wide (1000-hidden) network to 90% accuracy, then inspects the
/// hidden→output weights: counts how many are "weak" (|w| below 1/nHidden),
/// and prints that count, its fraction of all weights, and the mean |w|.
/// </summary>
void test()
{
    var nn = new vnn(2, 1000, 2, addon.RandomizeWeights);
    var tr = new trainer(nn, data.DataSets.twoParamTest, 0.01, 0.9);
    tr.TrainUntilAccuracy(0.9, 0.1);

    double threshold = 1.0 / nn.nHidden;
    int counter = 0;
    double mean = 0.0;
    foreach (var weight in nn.wHiddenOutput)
    {
        double magnitude = Abs(weight);
        mean += magnitude;
        if (magnitude < threshold)
        {
            counter++;
        }
    }
    mean /= nn.wHiddenOutput.Length;

    WriteLine("weak = " + counter);
    WriteLine("% = " + counter / (double)nn.wHiddenOutput.Length);
    WriteLine("Mean = " + mean + "; level = " + 1.0 / nn.nHidden);
}
// Thin pass-through constructor: all validation and buffer allocation is
// handled by the trainer base class; this type adds no state of its own here.
public randomUpdateTrainer(vnn nn, trainingSet tset, double learningRate, double momentum) : base(nn, tset, learningRate, momentum) { }
/// <summary>
/// Measures and prints the forward-pass time of a freshly randomized network of
/// the standard benchmark dimensions.
/// </summary>
public void SimpleVNNForwardSpeedTest()
{
    var network = new vnn(INPUT_SIZE, HIDDEN_SIZE, OUTPUT_SIZE, addon.RandomizeWeights);
    WriteLine($"Time: {measureForwardSpeed(network)}");
}
// Pass-through constructor: the network is handed straight to the base genome;
// no additional state is initialized here.
public MyGenericGenome(vnn nn) : base(nn) { }
// Private pass-through constructor: forwards the network ("dna") to the base
// genome; presumably instances are created via a factory elsewhere — confirm.
private MyNeuralishGenome(vnn dna) : base(dna) { }