Example 1
        // Might not have to load statically, but it's easy for now.
        public static bool LoadNet(ref NeuralNet bestNet, string path)
        {
            if (File.Exists(path))
            {
                /* File layout:
                 * 0 - float - Best score
                 * 1 - int - Number of layers in the net
                 * 2 - int - Number of neurons in the layer
                 * 3 - int - Number of floats in this neuron's set (weights + slopes)
                 * 4/5 - float - Interleaved weight/slope data
                 * f i i i fffffff i ffffffff etc
                 * 0 1 2 3 45454545 3 45454545 3 454545 2 3 45454545 etc
                 */
                float best = 0;
                using (BinaryReader reader = new BinaryReader(File.Open(path, FileMode.Open)))
                {
                    best = reader.ReadSingle();
                    int layers = reader.ReadInt32();
                    bestNet           = new NeuralNet(layers);
                    bestNet.bestScore = best;
                    for (int i = layers - 1; i >= 0; i--)
                    {
                        int neurons = reader.ReadInt32();
                        //bestNet.Neurons = neurons; // Temp
                        if (i == layers - 1) // Output layer
                        {
                            bestNet.array[i] = new NeuronList(null, neurons);
                        }
                        else
                        {
                            bestNet.array[i] = new NeuronList(bestNet.array[i + 1], neurons);
                        }

                        float[] weights;
                        float[] slopes;
                        for (int n = 0; n < neurons; n++)
                        {
                            int size = reader.ReadInt32();
                            weights = new float[size / 2];
                            slopes  = new float[size / 2];
                            for (int d = 0; d < size; d += 2)
                            {
                                weights[d / 2] = reader.ReadSingle();
                                slopes[d / 2]  = reader.ReadSingle();
                            }
                            bestNet.array[i].array[n] = new Neuron(weights, slopes);
                        }
                    }
                }
                return true;
            }
            return false;
        }
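
For reference, a minimal sketch of a matching writer for the layout documented above, assuming Neuron exposes its per-neuron arrays via Weights and Slopes (assumed names; the real class may expose them differently).

        // Hypothetical counterpart to LoadNet: writes the same layout, output
        // layer first (best score, layer count, then per layer the neuron count
        // and each neuron's interleaved weight/slope floats).
        // Neuron.Weights / Neuron.Slopes are assumed accessors.
        public static void SaveNet(NeuralNet net, string path)
        {
            using (BinaryWriter writer = new BinaryWriter(File.Open(path, FileMode.Create)))
            {
                writer.Write(net.bestScore);                        // 0 - best score
                writer.Write(net.array.Length);                     // 1 - layer count
                for (int i = net.array.Length - 1; i >= 0; i--)     // output layer first, matching LoadNet
                {
                    writer.Write(net.array[i].array.Length);        // 2 - neurons in this layer
                    foreach (Neuron neuron in net.array[i].array)
                    {
                        writer.Write(neuron.Weights.Length * 2);    // 3 - floats in this neuron's set
                        for (int d = 0; d < neuron.Weights.Length; d++)
                        {
                            writer.Write(neuron.Weights[d]);        // 4 - weight
                            writer.Write(neuron.Slopes[d]);         // 5 - slope
                        }
                    }
                }
            }
        }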
Example 2
        public NeuralNet Copy()
        {
            NeuronList connections = null;
            NeuralNet  ret         = new NeuralNet(array.Length);

            //ret.bestScore = bestScore;
            ret.count = count;
            //ret.Neurons = Neurons;
            // Copy from the output layer back to the input layer so each layer
            // can be linked to its already-copied successor.
            for (int i = array.Length - 1; i >= 0; i--)
            {
                ret.array[i] = connections = array[i].Copy(connections);
            }
            return ret;
        }
Example 3
        public void LearnSessionNoOutput(float[][] lessonplan, int bestof)
        {
            bool Kill = false;

            //int practice = 100; // Doesn't matter what it is, usually doesn't go above 20 anyway.

            NeuralNet[] neuralNets = new NeuralNet[TASKS];
            float[]     curScore   = new float[TASKS];
            Console.WriteLine("[Ctrl+D] to stop learning.");


            Task Listen = Task.Factory.StartNew(() =>
            {
                ConsoleKeyInfo key;
                while (!Kill)
                {
                    if (Console.KeyAvailable)
                    {
                        key = Console.ReadKey(true);
                        if (key.Key == ConsoleKey.D && key.Modifiers == ConsoleModifiers.Control)
                        {
                            Kill = true;
                        }
                    }
                }
            });

            for (int h = 0; h < bestof; h++)
            {
                int curBest     = -1;
                int currentLeft = Console.CursorLeft;
                int currentTop  = Console.CursorTop;
                //int tryNew = new Random().Next(24);
                NeuralNet randNet = null;
                if (h > 0 && h % 10 == 0)
                {
                    Random rand = new Random(DateTime.Now.Millisecond);
                    randNet = new NeuralNet(rand.Next(Math.Max(3, array.Length - 2), array.Length + 3), 6, rand.Next(Math.Max(1, array[1].array.Length - 2), array[1].array.Length + 3), 1, rand);
                }

                for (int ts = 0; ts < taskArray.Length; ts++)
                {
                    int t = ts;
                    taskArray[ts] = Task.Factory.StartNew(() =>
                    {
                        Random rand = new Random(DateTime.Now.Millisecond + ((int)Task.CurrentId * 478));

                        float lcurScore = 0, llcurScore = -1, lllcurScore = -2, llllcurScore = -3;
                        if (randNet != null)
                        {
                            neuralNets[t] = randNet.MutateCopy(rand);//new NeuralNet(rand.Next(Math.Max(3, bestNet.array.Length - 2), bestNet.array.Length + 3), 6, rand.Next(Math.Max(1, bestNet.array[1].array.Length - 2), bestNet.array[1].array.Length + 3), 1, rand);
                        }
                        else
                        {
                            neuralNets[t] = MutateCopy(rand);
                        }
                        for (int i = 0; i < 33; i++)
                        {
                            float[] res = new float[lessonplan.Length / 2];
                            for (int j = 0; j < lessonplan.Length; j += 2)
                            {
                                res[j / 2] = neuralNets[t].Learn(lessonplan[j], lessonplan[j + 1]);
                            }

                            if (Kill)
                            {
                                curScore = new float[TASKS];
                                break;
                            }

                            learnLen    = res.Length;
                            curScore[t] = 0;
                            for (int j = 0; j < res.Length; j++)
                            {
                                float accu = res[j];
                                if (accu < ACC_THRESHOLD - 0.1f)
                                {
                                    curScore[t] += 1f;
                                }
                                curScore[t] += 1f - accu;
                            }

                            // Plateau check: stop if the score has not changed over the last
                            // few passes, or if this is the final pass.
                            if ((curScore[t] == llcurScore && curScore[t] == lllcurScore && curScore[t] == llllcurScore && curScore[t] == lcurScore) || i == 32)
                            {
                                break;
                            }
                            // Cull copies that score well below the current best (an earlier threshold
                            // was res.Length / 7.0f; lowering the divisor lets more offshoots keep
                            // training: slower, possibly better results).
                            if (curScore[t] < bestScore / 2.035)
                            {
                                break;
                            }

                            // Shift the score history so the plateau check compares the last few passes.
                            llllcurScore = lllcurScore;
                            lllcurScore  = llcurScore;
                            llcurScore   = lcurScore;
                            lcurScore    = curScore[t];
                        }
                    });
                }

                Task.WaitAll(taskArray);

                for (int i = 0; i < curScore.Length; i++)
                {
                    if (curScore[i] > bestScore)
                    {
                        bestScore = curScore[i];
                        curBest   = i;
                    }
                }
                if (curBest >= 0)
                {
                    CopyFrom(neuralNets[curBest]);
                }
                if (bestScore >= (learnLen * 2.0f) - 0.1f || Kill)
                {
                    break;
                }
            }

            Kill = true;

            Console.WriteLine("Done Test");
        }
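
A usage sketch, assuming even indices of lessonplan hold the input vectors and odd indices the expected outputs (as the lessonplan[j] / lessonplan[j + 1] calls above suggest); the XOR data and the constructor arguments are illustrative assumptions, not taken from the original project.

        // Hypothetical caller: even indices are inputs, odd indices the expected
        // outputs, matching lessonplan[j] / lessonplan[j + 1] in the loop above.
        float[][] lessonplan = new float[][]
        {
            new float[] { 0f, 0f }, new float[] { 0f },
            new float[] { 0f, 1f }, new float[] { 1f },
            new float[] { 1f, 0f }, new float[] { 1f },
            new float[] { 1f, 1f }, new float[] { 0f },
        };
        // Constructor arguments mirror the randNet call above and are assumptions.
        NeuralNet net = new NeuralNet(4, 6, 4, 1, new Random());
        net.LearnSessionNoOutput(lessonplan, 200);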
Example 4
        public void LearnSession(float[][] lessonplan, int bestof)
        {
            bool Kill = false;

            //int practice = 100; // Doesn't matter what it is, usually doesn't go above 20 anyway.
            Console.WriteLine();

            NeuralNet[] neuralNets = new NeuralNet[TASKS];
            float[]     curScore   = new float[TASKS];
            Console.WriteLine("[Ctrl+D] to stop learning.\n\nTIME\t\tNUM\tBEST/{0}\t\tCAP\t%ACC", TASKS);


            Task Listen = Task.Factory.StartNew(() =>
            {
                ConsoleKeyInfo key;
                while (!Kill)
                {
                    if (Console.KeyAvailable)
                    {
                        key = Console.ReadKey(true);
                        if (key.Key == ConsoleKey.D && key.Modifiers == ConsoleModifiers.Control)
                        {
                            Kill = true;
                        }
                    }
                }
            });

            for (int h = 0; h < bestof; h++)
            {
                int curBest     = -1;
                int currentLeft = Console.CursorLeft;
                int currentTop  = Console.CursorTop;
                //int tryNew = new Random().Next(24);
                NeuralNet randNet = null;
                if (h > 0 && h % 10 == 0)
                {
                    Random rand = new Random(DateTime.Now.Millisecond);
                    randNet = new NeuralNet(rand.Next(Math.Max(3, array.Length - 2), array.Length + 3), 6, rand.Next(Math.Max(1, array[1].array.Length - 2), array[1].array.Length + 3), 1, rand);
                }

                for (int ts = 0; ts < taskArray.Length; ts++)
                {
                    int t = ts;
                    taskArray[ts] = Task.Factory.StartNew(() =>
                    {
                        Random rand = new Random(DateTime.Now.Millisecond + ((int)Task.CurrentId * 478));

                        float lcurScore = 0, llcurScore = -1, lllcurScore = -2, llllcurScore = -3;
                        if (randNet != null)
                        {
                            neuralNets[t] = randNet.MutateCopy(rand);//new NeuralNet(rand.Next(Math.Max(3, bestNet.array.Length - 2), bestNet.array.Length + 3), 6, rand.Next(Math.Max(1, bestNet.array[1].array.Length - 2), bestNet.array[1].array.Length + 3), 1, rand);
                        }
                        else
                        {
                            neuralNets[t] = MutateCopy(rand);
                        }
                        lock (_lock)
                        {
                            Console.SetCursorPosition(currentLeft + 94 + TASKS, currentTop);
                            Console.Write(neuralNets[t].GetID().PadRight(16));
                            Console.SetCursorPosition(currentLeft, currentTop);
                        }
                        for (int i = 0; i < 33; i++)
                        {
                            if (t == 0)
                            {
                                lock (_lock)
                                {
                                    Console.SetCursorPosition(currentLeft + 60, currentTop);
                                    Console.Write("                              " + i.ToString().PadRight(2));
                                    Console.SetCursorPosition(currentLeft, currentTop);
                                }
                            }
                            float[] res = new float[lessonplan.Length / 2];
                            for (int j = 0; j < lessonplan.Length; j += 2)
                            {
                                res[j / 2] = neuralNets[t].Learn(lessonplan[j], lessonplan[j + 1]);
                                if (t == 0)
                                {
                                    lock (_lock)
                                    {
                                        int conloc = (int)((float)j / lessonplan.Length * 30);
                                        Console.SetCursorPosition(currentLeft + 60 + conloc, currentTop);
                                        Console.Write(conloc % 2 == 0 ? '-' : '=');
                                        Console.SetCursorPosition(currentLeft, currentTop);
                                    }
                                }
                            }

                            if (Kill)
                            {
                                curScore = new float[TASKS];
                                break;
                            }

                            learnLen    = res.Length;
                            curScore[t] = 0;
                            for (int j = 0; j < res.Length; j++)
                            {
                                float accu = res[j];
                                if (accu < ACC_THRESHOLD - 0.1f)
                                {
                                    curScore[t] += 1f;
                                }
                                curScore[t] += 1f - accu;
                            }

                            // Plateau check: stop if the score has not changed over the last
                            // few passes, or if this is the final pass.
                            if ((curScore[t] == llcurScore && curScore[t] == lllcurScore && curScore[t] == llllcurScore && curScore[t] == lcurScore) || i == 32)
                            {
                                break;
                            }
                            // Cull copies that score well below the current best (an earlier threshold
                            // was res.Length / 7.0f; lowering the divisor lets more offshoots keep
                            // training: slower, possibly better results).
                            if (curScore[t] < bestScore / 2.035)
                            {
                                break;
                            }

                            // Shift the score history so the plateau check compares the last few passes.
                            llllcurScore = lllcurScore;
                            lllcurScore  = llcurScore;
                            llcurScore   = lcurScore;
                            lcurScore    = curScore[t];
                        }
                        lock (_lock)
                        {
                            Console.SetCursorPosition(currentLeft + 93 + t, currentTop);
                            Console.Write(curScore[t] > bestScore ? 'o' : '.');
                            Console.SetCursorPosition(currentLeft, currentTop);
                        }
                    });
                }
                lock (_lock)
                {
                    Console.SetCursorPosition(currentLeft, currentTop);
                    Console.WriteLine("[" + DateTime.Now.ToLongTimeString() + "]\t" + h.ToString("d4") + "\t" + "WORKING" + "\t\t" + learnLen * 2 + "\t" + "WORKING".PadRight(78));
                    Console.SetCursorPosition(currentLeft, currentTop);
                }

                Task.WaitAll(taskArray);

                for (int i = 0; i < curScore.Length; i++)
                {
                    if (curScore[i] > bestScore)
                    {
                        bestScore = curScore[i];
                        curBest   = i;
                    }
                }
                if (curBest >= 0)
                {
                    CopyFrom(neuralNets[curBest]);
                    Console.WriteLine("[" + DateTime.Now.ToLongTimeString() + "]\t" + h.ToString("d4") + "\t" + bestScore.ToString("f5").PadRight(8) + "\t" + learnLen * 2 + "\t" + ((bestScore / (learnLen * 2)) * 100).ToString("f2") + "\t");
                }
                if (bestScore >= (learnLen * 2.0f) - 0.1f || Kill)
                {
                    break;
                }
            }

            Kill = true;

            Console.WriteLine("[" + DateTime.Now.ToLongTimeString() + "]\tDone Test\t\t\t\t\t\t\t\n");
        }
Example 5
 public void CopyFrom(NeuralNet net)
 {
     array = (NeuronList[])net.array.Clone();
     count = net.count;
 }
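
Array.Clone is a shallow copy, so the NeuronList layers stay shared between the two nets. A sketch of a deep variant, reusing the backward-linking pattern from Copy() in Example 2; the DeepCopyFrom name is a hypothetical addition.

 // Hypothetical deep variant: rebuilds each layer with NeuronList.Copy,
 // linking layers from the output back to the input as Copy() does.
 public void DeepCopyFrom(NeuralNet net)
 {
     NeuronList connections = null;
     array = new NeuronList[net.array.Length];
     for (int i = net.array.Length - 1; i >= 0; i--)
     {
         array[i] = connections = net.array[i].Copy(connections);
     }
     count = net.count;
 }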