Example #1
 // Start is called before the first frame update
 void Start()
 {
     // Initialize a neural network with 2 inputs, 2 hidden layers
     // (2 neurons each) and 1 output. It uses Unity's Random.Range as the
     // random source and a step activation function (every neuron outputs
     // either 0 or 1).
     net = new NeuralNet.NeuralNetwork(2, new int[2] {
         2, 2
     }, 1, Random.Range, ActivationFunctions.Step);
 }
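The ActivationFunctions.Step delegate passed above is not shown in this example. A minimal sketch of what it might look like, assuming the NeuralNet library expects a simple float-to-float activation (the exact signature is an assumption):

public static class ActivationFunctions
{
    // Step activation: a neuron outputs 1 when its weighted input sum is
    // non-negative and 0 otherwise, matching the comment in the example.
    public static float Step(float weightedSum)
    {
        return weightedSum >= 0f ? 1f : 0f;
    }
}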
Example #2
        /// <summary>
        /// Wraps a freshly configured neural network in a genome; fitness starts
        /// at zero until the genome is evaluated.
        /// </summary>
        public Genome(int NumInputs, int NumOutputs, int NeuronsPerHiddenLayer, int NumHiddenLayers)
        {
            Net = new NeuralNetwork();
            Net.NumInputs = NumInputs;
            Net.NumOutputs = NumOutputs;
            Net.NeuronsPerHiddenLayer = NeuronsPerHiddenLayer;
            Net.NumHiddenLayers = NumHiddenLayers;

            Fitness = 0;
        }
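The genome pairs a network with a fitness value, which points to a neuroevolution setup. A minimal sketch, not from the original project, of how such genomes are typically driven (EvaluateFitness is a hypothetical helper, and a using for System.Collections.Generic is assumed):

            // Build a population of genomes with identical network shapes.
            List<Genome> population = new List<Genome>();
            for (int i = 0; i < 50; i++)
            {
                population.Add(new Genome(NumInputs: 4, NumOutputs: 2,
                                          NeuronsPerHiddenLayer: 6, NumHiddenLayers: 1));
            }

            // Score each genome on the task, then sort so the fittest come first
            // and can be selected as parents for the next generation.
            // foreach (Genome g in population) g.Fitness = EvaluateFitness(g.Net);
            population.Sort((a, b) => b.Fitness.CompareTo(a.Fitness));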
Example #3
        /// <summary>
        /// Machine learning attempt 3
        /// </summary>
        public static void Main(String[] args)
        {
            NeuralNetwork nn = new NeuralNetwork(new UInt16[] { 1089, 16, 24, 4 }) /*{descent=Program.learnRate}*/;

            DateTime    start;
            TimeSpan    ts;
            List <Byte> answers;

            String[] dirs = Directory.GetDirectories(@".\Dataset\"), files;
            Byte[]   desiredAnswer;
            Random   r = new Random();
            String   dir, file;
            UInt32   correct = 0, incorrect = 0;
            Boolean  step = false, vis = false;

            /*
             * foreach (Byte @byte in
             *      nn.makePrediction(Util.imageToNeuralData(@"C:\Users\Elite\Documents\SharpDevelop Projects\NeuralNet\NeuralNet\bin\Debug\Dataset\0\number-1.png"),
             *                      new Byte[]{255,0,0,0,0,0,0,0,0,0},
             *                      false)
             *      )
             *      Console.WriteLine(@byte.ToString()+',');
             *
             * goto halt;
             */

            // Training loop: until Escape is pressed, pick a random labelled digit
            // image, run one training pass, and keep a running right/wrong tally.
trainSubRt:

            while (!(Console.KeyAvailable && Console.ReadKey(true).Key == ConsoleKey.Escape))
            {
                start = DateTime.UtcNow;

                // Pick a random class directory and a random image inside it, then
                // build the target vector: 255 at the index of the desired digit, 0 elsewhere.
                desiredAnswer = new Byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
                dir           = (dirs[r.Next(0, 4)]);
                files         = Directory.GetFiles(dir);
                file          = files[r.Next(0, files.Length)];

                desiredAnswer[Byte.Parse(dir.Last().ToString())] = 255;

//				desiredAnswer=new Byte[]{255,0,0,0,0,0,0,0,0,0};
//				file=@"C:\Users\Elite\Documents\SharpDevelop Projects\NeuralNet\NeuralNet\bin\Debug\Dataset\0\0.png";
                answers = nn.makePrediction(Util.imageToNeuralData(file), desiredAnswer, true, vis, delegate(){ Program.Main(args); });

//				foreach (Byte @byte in answers)
//					Console.WriteLine(@byte.ToString()+',');

                ts = DateTime.UtcNow - start;

                Byte prediction = (Byte)(answers.IndexOf(answers.Max())), desiredPrediction = (Byte)(desiredAnswer.ToList().IndexOf(desiredAnswer.Max()));
                if (prediction == desiredPrediction)
                {
                    ++correct;
                }
                else
                {
                    ++incorrect;
                }
                Console.WriteLine("Answer: " + prediction.ToString() + ", desired: " + desiredPrediction.ToString() + ", R/W: " + correct.ToString() + '/' + incorrect.ToString() + "(" + ts.TotalMilliseconds.ToString() + "ms, " + file + ')');

                /*UInt32 total=correct+incorrect;
                 *
                 * if ((correct>incorrect&&((total)>8))||((total)>13000)) {
                 *
                 *      Console.WriteLine("HL size:"+Program.hlSize.ToString());
                 *
                 *      if (Program.hlSize==1) {
                 *
                 *              List<String> lines=new List<String>();
                 *              foreach (KeyValuePair<UInt16,List<UInt32>> kvp in Program.times) {
                 *                      String str="HL size: "+kvp.Key.ToString()+",Iterations (avg:"+kvp.Value.Select(x=>Convert.ToInt32(x)).Average().ToString()+"): ";
                 *                      foreach (UInt32 num in kvp.Value)
                 *                              str+=num.ToString()+',';
                 *                      lines.Add(str);
                 *              }
                 *
                 *              File.WriteAllLines("./results",lines);
                 *              return;
                 *
                 *      }
                 *
                 *      if (Program.loopCtr==Program.loopCtrMax)
                 *              Program.times.Add(Program.hlSize,new List<UInt32>(Program.loopCtrMax));
                 *
                 *      times[Program.hlSize].Add(total);
                 *
                 *      if (Program.loopCtr==0) {
                 *
                 *              --Program.hlSize;
                 *              Program.loopCtr=Program.loopCtrMax;
                 *
                 *      }
                 *      else--Program.loopCtr;
                 *
                 *      Program.Main(args);
                 *      return;
                 *
                 * }*/
                if (step && Console.ReadKey().Key == ConsoleKey.Escape)
                {
                    break;
                }
            }


            // Interactive console: "continue"/"train" resumes training, "break"/"stop"
            // halts, "visual"/"visualize" and "step" toggle those modes, and any other
            // input is treated as an image path to classify.
            while (true)
            {
                String str = Console.ReadLine();
                if (str == "continue" || str == "train")
                {
                    goto trainSubRt;
                }
                else if (str == "break" || str == "stop")
                {
                    goto halt;
                }
                else if (str == "visual" || str == "visualize")
                {
                    vis = !vis;
                    goto trainSubRt;
                }
                else if (str == "step")
                {
                    step = !step;
                    goto trainSubRt;
                }
                else
                {
                    List <Byte> results
                        = nn.makePrediction(Util.imageToNeuralData(str),
                                            null,
                                            false,
                                            vis).ToList();

                    foreach (Byte @byte in results)
                    {
                        Console.Write(@byte.ToString() + ',');
                    }

                    Console.WriteLine("Your number was a: " + results.IndexOf(results.Max()).ToString());
                }
            }


halt:
            goto halt; // intentional spin: "halt" just loops here forever
        }
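The input layer above has 1089 neurons, which matches a flattened 33×33 image. Util.imageToNeuralData is not part of this example; a minimal sketch of such a conversion, where the method name, the Byte[] return type and the grayscale mapping are assumptions (Bitmap and Color come from System.Drawing):

        // Flatten an image into one byte per pixel, row by row, so a 33x33
        // image yields the 1089 inputs the network above expects.
        public static Byte[] ImageToNeuralDataSketch(String path)
        {
            using (Bitmap bmp = new Bitmap(path))
            {
                Byte[] data = new Byte[bmp.Width * bmp.Height];
                for (int y = 0; y < bmp.Height; y++)
                {
                    for (int x = 0; x < bmp.Width; x++)
                    {
                        Color c = bmp.GetPixel(x, y);
                        data[y * bmp.Width + x] = (Byte)((c.R + c.G + c.B) / 3); // simple grayscale
                    }
                }
                return data;
            }
        }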
Example #4
        public static NeuralNetwork LoadNetworkFromFile(String filename)
        {
            NeuralNetwork neuralNetwork = null;

            try
            {
                StreamReader sr  = new StreamReader(filename);
                String       str = sr.ReadLine(); // headers
                String[]     strTab;
                str    = sr.ReadLine();
                strTab = str.Split(';');
                bool isNetworkUsingBias = bool.Parse(strTab[1]);

                str    = sr.ReadLine();
                strTab = str.Split(';');
                double minWeight = double.Parse(strTab[1]);

                str    = sr.ReadLine();
                strTab = str.Split(';');
                double maxWeight = double.Parse(strTab[1]);

                str    = sr.ReadLine();
                strTab = str.Split(';');
                LearningMethod methodOfLearning = strTab[1] == "0" ? LearningMethod.LINEAR : LearningMethod.NOT_LINEAR;

                str    = sr.ReadLine();
                strTab = str.Split(';');
                String name = strTab[1];

                String[] layersNeuronsStr                   = sr.ReadLine().Split(';'); // neurons in layers
                String[] layersActivationFunctionStr        = sr.ReadLine().Split(';'); // activation function id per layer
                List <LayerCreationInfo> layerCreationInfos = new List <LayerCreationInfo>();

                for (int i = 1; i < layersNeuronsStr.Length; i++)
                {
                    int layerIdx          = i - 1;
                    LayerCreationInfo lci = new LayerCreationInfo();
                    lci.HowManyNeuronsPerLayer = int.Parse(layersNeuronsStr[i]);
                    lci.LayerNo = layerIdx;
                    lci.PreviousLayerNeuronsCount = layerIdx == 0 ? 0 : layerCreationInfos[layerIdx - 1].HowManyNeuronsPerLayer;

                    int LayerActivationFunctionInt = int.Parse(layersActivationFunctionStr[i]);
                    lci.LayerActivationFunction = GetActivationFunctionById(LayerActivationFunctionInt);

                    layerCreationInfos.Add(lci);
                }

                Topology topology = new Topology(layerCreationInfos, isNetworkUsingBias, minWeight, maxWeight);

                neuralNetwork = new NeuralNetwork(topology, minWeight, maxWeight, methodOfLearning, name);

                // provide saved neurons weights:
                for (int layerNo = 0; layerNo < neuralNetwork.Topology.Layers.Count; layerNo++)
                {
                    Layer layer = neuralNetwork.Topology.Layers[layerNo];

                    for (int neuronNo = 0; neuronNo < layer.Neurons.Count; neuronNo++)
                    {
                        String[] inputsString = sr.ReadLine().Split(';');


                        for (int inputNo = 0; inputNo < layer.Neurons[neuronNo].Inputs.Count; inputNo++)
                        {
                            layer.Neurons[neuronNo].Inputs[inputNo].Weight = double.Parse(inputsString[inputNo + 1]);
                        }
                    }
                }

                neuralNetwork.PropagateValuesForward(); // possibly redundant here, but it leaves the freshly loaded network in a consistent state
                sr.Close();
                return(neuralNetwork);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error while loading network: " + ex.Message);

                return(null);
            }
        }
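For reference, the loader above reads a semicolon-separated text file: one header line that is skipped, five label;value rows (bias flag, minimum weight, maximum weight, learning method, name), one row with the neuron count per layer, one row with the activation-function id per layer, and finally one row of weights per neuron. A sketch of the header portion of such a file, where the labels in the first column and the concrete numbers are illustrative only:

Property;Value
UseBias;True
MinWeight;-1
MaxWeight;1
LearningMethod;0
Name;sample
Neurons;2;2;1
Activation;1;1;1

One "label;weight;weight;..." row per neuron would follow; how many rows and how many weights each needs depends on the topology the NeuralNetwork builds, so none are shown here.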