Ejemplo n.º 1
0
    public NeuralNet GetMutatedCopy(float mutationStrength, int randomSynapses)
    {
        // Clone this network's topology, jitter every synapse weight within
        // +/- mutationStrength, prune near-zero weights, then add a handful of
        // brand-new random synapses to the offspring.
        NeuralNet offspring = new NeuralNet(numberOfNeurons, firingTime, downTime);

        for (int source = 0; source < numberOfNeurons; source++)
        {
            offspring.synapses[source] = new Dictionary<int, float>();

            foreach (KeyValuePair<int, float> synapse in synapses[source])
            {
                // Perturb the weight, then clamp it to the allowed [-1, 2] range.
                float mutated = synapse.Value + Random.Range(-mutationStrength, mutationStrength);
                if (mutated > 2f) mutated = 2f;
                if (mutated < -1f) mutated = -1f;

                // Weights that decayed close to zero are dropped entirely.
                if (Mathf.Abs(mutated) > 0.1f)
                {
                    offspring.synapses[source].Add(synapse.Key, mutated);
                }
            }
        }

        offspring.AddRandomSynapses(randomSynapses);
        return offspring;
    }
Ejemplo n.º 2
0
        static void Main()
        {
            // Topology: 3 inputs, two hidden layers of 5 neurons, 1 output (4 layers total).
            const uint inputCount = 3;
            const uint outputCount = 1;
            const uint layerCount = 4;
            const uint hiddenNeurons = 5;
            const float targetError = 0.0001F;
            const uint epochLimit = 5000;
            const uint reportInterval = 1000;

            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, layerCount, inputCount, hiddenNeurons, hiddenNeurons, outputCount))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.LINEAR;
                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_RPROP;

                using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\scaling.data"))
                {
                    // Derive scaling parameters from the data first, then scale
                    // the training set itself before training.
                    net.SetScalingParams(data, -1, 1, -1, 1);
                    net.ScaleTrain(data);

                    net.TrainOnData(data, epochLimit, reportInterval, targetError);
                    net.Save("..\\..\\..\\datasets\\scaling.net");

                    Console.ReadKey();
                }
            }
        }
        public void Constructor_CreatesCorrectNumberOfLayers()
        {
            // A three-entry layer specification must produce exactly three
            // layers, none of them null.
            var net = new NeuralNet(neuronsPerLayer: new[] { 1, 3, 2 });

            Assert.AreEqual(3, net.Layers.Count);
            Assert.IsTrue(net.Layers.All(l => l != null));
        }
Ejemplo n.º 4
0
        static void Main()
        {
            const uint layerCount = 3;
            const uint hiddenNeurons = 96;
            const float targetError = 0.001F;

            using (TrainingData trainData = new TrainingData("..\\..\\..\\datasets\\robot.train"))
            using (TrainingData testData = new TrainingData("..\\..\\..\\datasets\\robot.test"))
            {
                // Sweep the learning momentum from 0.0 to 0.6 in 0.1 steps,
                // training a fresh network at each setting and reporting its
                // MSE on both the training and the held-out test set.
                for (float momentum = 0.0F; momentum < 0.7F; momentum += 0.1F)
                {
                    Console.WriteLine("============= momentum = {0} =============\n", momentum);

                    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, layerCount, trainData.InputCount, hiddenNeurons, trainData.OutputCount))
                    {
                        net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;
                        net.LearningMomentum = momentum;

                        net.TrainOnData(trainData, 20000, 5000, targetError);

                        Console.WriteLine("MSE error on train data: {0}", net.TestData(trainData));
                        Console.WriteLine("MSE error on test data: {0}", net.TestData(testData));
                    }
                }
            }
            Console.ReadKey();
        }
Ejemplo n.º 5
0
        static void Main()
        {
            // Loads a previously trained and saved scaling network (see the
            // companion training example) and runs every training record
            // through it, comparing the de-scaled output with the original target.
            DataType[] calc_out;
            Console.WriteLine("Creating network.");

            using(NeuralNet net = new NeuralNet("..\\..\\..\\examples\\scaling.net"))
            {
                net.PrintConnections();
                net.PrintParameters();
                Console.WriteLine("Testing network.");
                using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\scaling.data"))
                {
                    for (int i = 0; i < data.TrainDataLength; i++)
                    {
                        // Test()/Run() accumulate MSE; reset per sample.
                        net.ResetMSE();
                        // NOTE(review): this presumably relies on ScaleInput
                        // scaling the accessor's underlying native data in place,
                        // so that the second GetTrainInput call sees scaled
                        // values — confirm against the FANNCSharp wrapper.
                        net.ScaleInput(data.GetTrainInput((uint)i));
                        calc_out = net.Run(data.GetTrainInput((uint)i));
                        // Map the network output back into the original range.
                        net.DescaleOutput(calc_out);
                        Console.WriteLine("Result {0} original {1} error {2}", calc_out[0], data.OutputAccessor[i][0],
                                          FannAbs(calc_out[0] - data.OutputAccessor[i][0]));
                    }
                        Console.ReadKey();
                }
            }
        }
Ejemplo n.º 6
0
        static void Main(string[] argv)
        {
            const uint max_epochs = 1000;
            uint num_threads = 1;

            // An optional command-line argument selects the number of training
            // threads; anything else leaves training single-threaded.
            if (argv.Length == 2)
                num_threads = UInt32.Parse(argv[1]);

            using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, 3, data.InputCount, 32, data.OutputCount))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID;

                long before = Environment.TickCount;
                for (int epoch = 1; epoch <= max_epochs; epoch++)
                {
                    // iRPROP- parallel training when multi-threaded; plain
                    // single-threaded epoch training otherwise.
                    float error = num_threads > 1 ? net.TrainEpochIrpropmParallel(data, num_threads) : net.TrainEpoch(data);
                    Console.WriteLine("Epochs     {0}. Current error: {1}", epoch.ToString("00000000"), error.ToString("0.0000000000"));
                }

                Console.WriteLine("ticks {0}", Environment.TickCount - before);
                Console.ReadKey();
            }
        }
        public void Constructor_EachLayerHasCorrectNumberOfNeurons()
        {
            // The layer sizes given to the constructor must map one-to-one
            // onto the neuron counts of the created layers.
            var net = new NeuralNet(neuronsPerLayer: new[] { 3, 2, 4 });

            Assert.AreEqual(3, net.Layers[0].Neurons.Count, "First layer");
            Assert.AreEqual(2, net.Layers[1].Neurons.Count, "Second layer");
            Assert.AreEqual(4, net.Layers[2].Neurons.Count, "Third layer");
        }
Ejemplo n.º 8
0
 public DirectNetwork(int inpNum, int classNum)
 {
     // Layer spec strings: an "input" layer of inpNum values, a hidden
     // fully-connected ("direct") layer of inpNum/2 + 1 neurons, and a
     // classNum-wide direct output layer.
     string inputSpec = $"input:{inpNum}:1:1:1";
     string hiddenSpec = $"direct:{inpNum / 2 + 1}";
     string outputSpec = $"direct:{classNum}";

     net = new NeuralNet(new string[] { inputSpec, hiddenSpec, outputSpec });
 }
Ejemplo n.º 9
0
 public float [,] FeedForward(float [,] a, NeuralNet net)
 {
     // Propagate the activations through every layer in order:
     //   a <- sigmoid(W[i] * a + b[i])
     float[,] activations = a;
     for (int layer = 0; layer < net.weights.Length; layer++)
     {
         float[,] weighted = Prod(net.weights[layer], activations);
         activations = Sigmoid(Add(weighted, net.biases[layer]));
     }
     return activations;
 }
Ejemplo n.º 10
0
 public Prediction(ProfitLossCalculator profitLossCalculator, StrategyI strategy, NeuralNet net, double buyLevel, double sellLevel)
 {
     // Plain data-carrier construction: every argument is stored unmodified.
     Net = net;
     Strategy = strategy;
     ProfitLossCalculator = profitLossCalculator;
     BuyLevel = buyLevel;
     SellLevel = sellLevel;
 }
Ejemplo n.º 11
0
        static void Main(string[] args)
        {
            // Build a network (2 inputs, 1 output, 3 hidden layers of 5
            // neurons — per the constructor's argument order) and train it on
            // the OR truth table for 10000 iterations.
            var test   = new NeuralNet(2, 1, 3, 5);
            var orGate = new OrGate();

            // The AndGate and NotGate instances previously created here were
            // never used — only the OR gate is trained against — so they have
            // been removed.
            test.TrainWithData(orGate, 10000);
        }
Ejemplo n.º 12
0
        public void Spawn()
        {
            // Creates a new NeuralNet ScriptableObject, initialises it with the
            // configured cost function and layer layout, and saves it as a
            // Unity asset named `fileName` next to the `path` asset.
            NeuralNet neuralNet = CreateInstance <NeuralNet>();

            neuralNet.Initialize(costFunction, layersNodes);
            // NOTE(review): AssetDatabase.GetAssetPath(path) is assumed to
            // resolve to a folder asset; an existing asset with the same name
            // would be replaced — confirm intended behaviour.
            string newAssetPath = string.Format("{0}/{1}.asset", AssetDatabase.GetAssetPath(path), fileName);

            AssetDatabase.CreateAsset(neuralNet, newAssetPath);
        }
Ejemplo n.º 13
0
    private void LoadNeuralNet()
    {
        // Deserialise the network from a JSON text resource; m_Net is left
        // untouched when the resource cannot be loaded.
        var textFile = FileHelper.LoadTextResource(m_NetFilename);
        if (textFile == null)
        {
            return;
        }

        m_Net = JsonUtility.FromJson <NeuralNet>(textFile.text);
    }
Ejemplo n.º 14
0
 // Builds a Cop sprite: forwards all rendering/physics arguments to the base
 // class, then wires up this cop's neural net and a per-frame jump check.
 public Cop(String mask, Texture2D[] frames, abstractKeys keys, int engI, Engine eng, Texture2D FirstFrame, Vector2 position,
            Rectangle?sourceRectangle, Color color, float rotation, Vector2 origin, Vector2 scale,
            SpriteEffects effects, float layerDepth, PhysicsManager pm, float jf)
     : base(true, mask, frames, keys, engI, eng, FirstFrame, position, sourceRectangle, color, rotation, origin, scale,
            effects, layerDepth, pm, jf)
 {
     // Hidden layer is sized inputNum + 3; inputNum/outputNum come from the
     // enclosing class (not visible in this snippet).
     nn = new NeuralNet(inputNum, inputNum + 3, outputNum);
     // NOTE(review): subscribing to a static event without a matching
     // unsubscribe can keep this instance alive — verify removal elsewhere.
     Game1.Event_Update += shouldJump;
 }
Ejemplo n.º 15
0
        public Bird(Texture2D texture, Vector2 position, NeuralNet brain)
        {
            // Store the sprite, start position and controlling network;
            // a freshly spawned bird is always at rest.
            Brain = brain;
            Position = position;
            this.texture = texture;
            Speed = Vector2.Zero;
        }
Ejemplo n.º 16
0
 // Classifies every datum in the dataset with the given model, returning one
 // label+confidence entry per input, in input order.
 public static LabelWithConfidence[] LabelWithConfidence(NeuralNet model, Dataset input)
 {
     // Hoist Count() so it is evaluated once instead of on every loop
     // iteration (and once more for the array allocation).
     int count = input.Count();
     LabelWithConfidence[] result = new LabelWithConfidence[count];
     for (int i = 0; i < count; i++)
     {
         result[i] = LabelWithConfidence(model, input.GetDatum(i), true);
     }
     return(result);
 }
Ejemplo n.º 17
0
        // XOR smoke test: load a saved network (fixed- or floating-point build
        // selected by FANN_FIXED), run every training pair through it, and in
        // the fixed-point build fail (return -1) when any error exceeds 0.2.
        static int Main(string[] args)
        {
            int ret = 0;
            #if FANN_FIXED
            using (NeuralNet net = new NeuralNet("..\\..\\..\\examples\\xor_fixed.net"))
            #else
            using (NeuralNet net = new NeuralNet("..\\..\\..\\examples\\xor_float.net"))
            #endif
            {
                net.PrintConnections();
                net.PrintParameters();

                Console.WriteLine("Testing network.");

                using (TrainingData data = new TrainingData())
                {
            #if FANN_FIXED
                    if (!data.ReadTrainFromFile("..\\..\\..\\examples\\xor_fixed.data"))
            #else
                    if (!data.ReadTrainFromFile("..\\..\\..\\examples\\xor.data"))
            #endif
                    {
                        Console.WriteLine("Error reading training data --- ABORTING.\n");
                        return -1;
                    }
                    for (int i = 0; i < data.TrainDataLength; i++)
                    {
                        // Test() accumulates MSE, so reset it per sample.
                        net.ResetMSE();
                        DataType[] calc_out = net.Test(data.GetTrainInput((uint)i).Array, data.GetTrainOutput((uint)i).Array);
            #if FANN_FIXED
                        // BUG FIX: the second input argument printed [i][0]
                        // twice; it now prints [i][1] to match the float branch.
                        Console.WriteLine("XOR test ({0}, {1}) - {2}, should be {3}, difference={4}",
                                            data.InputAccessor[i][0], data.InputAccessor[i][1],
                                            calc_out[0], data.OutputAccessor[i][0],
                                            (float) fann_abs(calc_out[0] - data.OutputAccessor[i][0]) / net.Multiplier);

                        if ((float)fann_abs(calc_out[0] - data.OutputAccessor[i][0]) / net.Multiplier > 0.2)
                        {
                            Console.WriteLine("Test failed");
                            ret = -1;
                        }
            #else

                        Console.WriteLine("XOR test ({0}, {1}) -> {2}, should be {3}, difference={4}",
                            data.GetTrainInput((uint)i)[0],
                            data.GetTrainInput((uint)i)[1],
                            calc_out[0],
                            data.GetTrainOutput((uint)i)[0],
                            calc_out[0] - data.GetTrainOutput((uint)i)[0]);
            #endif

                    }
                    Console.WriteLine("Cleaning up.");
                }
            }
            Console.ReadKey();
            return ret;
        }
Ejemplo n.º 18
0
 // Classifies every datum in the dataset with the given model, returning one
 // integer label per input, in input order.
 public static int[] Label(NeuralNet model, Dataset input)
 {
     // Hoist Count() so it is evaluated once instead of on every loop
     // iteration (and once more for the array allocation).
     int count = input.Count();
     int[] result = new int[count];
     for (int i = 0; i < count; i++)
     {
         result[i] = Label(model, input.GetDatum(i), true);
     }
     return(result);
 }
Ejemplo n.º 19
0
    // Copy constructor: deep-copies the structure, neurons and weights of
    // another network.
    public NeuralNet(NeuralNet netForCopy)
    {
        // BUG FIX: the original allocated a fresh nnStructure array and then
        // immediately overwrote the reference with netForCopy's own array, so
        // both networks shared (and could mutate) the same structure. Clone
        // the contents instead of aliasing the reference.
        this.nnStructure = (int[])netForCopy.nnStructure.Clone();

        initaliseNeurons();
        copyNeurons(netForCopy.neurons);
        copyWeights(netForCopy.weights);
    }
Ejemplo n.º 20
0
 public DistanceNetwork(int width, int height, int deep, int bs, int classNum)
 {
     // Pre-allocate the 4-D input tensor and the output vector, then build
     // a two-layer net: the input layer plus a distance-based classifier.
     input  = new Tensor4(width, height, deep, bs);
     output = new Vector(classNum);

     string inputSpec = $"input:{width}:{height}:{deep}:{bs}";
     string distanceSpec = $"convDistance:{classNum}";
     net = new NeuralNet(new string[] { inputSpec, distanceSpec });
 }
Ejemplo n.º 21
0
 // FANN training callback, invoked every `epochsBetweenReports` epochs.
 // Returns 1 so that training continues (per FANN convention a negative
 // return presumably stops training — confirm against the wrapper docs).
 static int TrainingCallback(NeuralNet net, TrainingData data, uint maxEpochs, uint epochsBetweenReports, float desiredError, uint epochs, object userData)
 {
     System.GC.Collect(); // Make sure nothing's getting garbage-collected prematurely
     GC.WaitForPendingFinalizers();
     // Reads the last connection weight and the last input value to exercise
     // the accessor marshalling after a forced GC; userData carries the
     // greeting string supplied to SetCallback.
     Console.WriteLine("Callback: Last neuron weight: {0}, Last data input: {1}, Max epochs: {2}\nEpochs between reports: {3}, Desired error: {4}, Current epoch: {5}\nGreeting: \"{6}\"",
                       net.ConnectionArray[net.TotalConnections - 1].Weight, data.InputAccessor.Get((int)data.TrainDataLength - 1, (int)data.InputCount - 1),
                       maxEpochs, epochsBetweenReports, desiredError, epochs, userData);
     return(1);
 }
Ejemplo n.º 22
0
	// Load the neural net from the file.
	private void LoadNetFromFile()
	{
		// SECURITY NOTE(review): BinaryFormatter is unsafe for untrusted data
		// and deprecated (removed in .NET 9); consider migrating the save
		// format, e.g. to JSON.
		BinaryFormatter bf = new BinaryFormatter();

		// BUG FIX: the stream was only closed on the happy path; `using`
		// guarantees it is closed even when Deserialize throws.
		using (FileStream netFile = File.Open(fileName, FileMode.Open))
		{
			neuralNet = (NeuralNet)bf.Deserialize(netFile);
			neuralNet.ParentFrog = gameObject;
		}
	}
Ejemplo n.º 23
0
 private void FillPopulationWithRandomValues(NeuralNet[] newPopulation, int startingIndex)
 {
     // Fill every slot from startingIndex up to the population size with a
     // freshly initialised random network.
     for (int slot = startingIndex; slot < initialPopulation; slot++)
     {
         newPopulation[slot] = new NeuralNet();
         newPopulation[slot].Initialise(controller.LAYERS, controller.NEURONS);
     }
 }
Ejemplo n.º 24
0
        public TestResultBase Test(NeuralNet <T> network, IDataSet <T> testSet)
        {
            // Evaluate the network on the data set from a clean state, report
            // the outcome, then hand it back to the caller.
            network.ResetMemory();

            var testResult = TestInternal(network, testSet);
            OnTestReport(testResult);

            return testResult;
        }
Ejemplo n.º 25
0
        public void NeuralNetwork_CanCreate()
        {
            // Smoke test: the network and activation objects can be
            // constructed, and ReLU acts as the identity on positive input.
            var ann  = new NeuralNet();
            var relu = new Relu();

            var ans = relu.Apply(5);

            // BUG FIX: ReLU(x) = max(0, x), so Apply(5) must yield 5 — the
            // original expectation of 2 could never hold for a ReLU.
            Assert.That(ans, Is.EqualTo(5));
        }
Ejemplo n.º 26
0
 public void enableAI(NeuralNet brain)
 {
     // Hand control of the craft to the given network and light every thruster.
     AIEnablied = true;
     this.brain = brain;

     foreach (ThrusterController controller in thrusterControllers)
     {
         controller.firing = true;
     }
 }
Ejemplo n.º 27
0
 public Ensemble()
 {
     // Build an ensemble of four identical 7-5-1 layer networks
     // (input, hidden layer, output).
     for (int member = 0; member < 4; member++)
     {
         uint[] layerSizes = { 7, 5, 1 };
         nn = new NeuralNet(FANNCSharp.NetworkType.LAYER, layerSizes);
         neuralNetList.Add(nn);
     }
 }
Ejemplo n.º 28
0
        // XOR smoke test (duplicate of the other xor-test example): load a
        // saved network (fixed- or floating-point build selected by
        // FANN_FIXED), run every training pair through it, and in the
        // fixed-point build fail (return -1) when any error exceeds 0.2.
        static int Main(string[] args)
        {
            int ret = 0;

#if FANN_FIXED
            using (NeuralNet net = new NeuralNet("..\\..\\..\\examples\\xor_fixed.net"))
#else
            using (NeuralNet net = new NeuralNet("..\\..\\..\\examples\\xor_float.net"))
#endif
            {
                net.PrintConnections();
                net.PrintParameters();

                Console.WriteLine("Testing network.");

                using (TrainingData data = new TrainingData())
                {
#if FANN_FIXED
                    if (!data.ReadTrainFromFile("..\\..\\..\\examples\\xor_fixed.data"))
#else
                    if (!data.ReadTrainFromFile("..\\..\\..\\examples\\xor.data"))
#endif
                    {
                        Console.WriteLine("Error reading training data --- ABORTING.\n");
                        return(-1);
                    }
                    for (int i = 0; i < data.TrainDataLength; i++)
                    {
                        // Test() accumulates MSE, so reset it per sample.
                        net.ResetMSE();
                        DataType[] calc_out = net.Test(data.GetTrainInput((uint)i).Array, data.GetTrainOutput((uint)i).Array);
#if FANN_FIXED
                        // BUG FIX: the second input argument printed [i][0]
                        // twice; it now prints [i][1] to match the float branch.
                        Console.WriteLine("XOR test ({0}, {1}) - {2}, should be {3}, difference={4}",
                                          data.InputAccessor[i][0], data.InputAccessor[i][1],
                                          calc_out[0], data.OutputAccessor[i][0],
                                          (float)fann_abs(calc_out[0] - data.OutputAccessor[i][0]) / net.Multiplier);


                        if ((float)fann_abs(calc_out[0] - data.OutputAccessor[i][0]) / net.Multiplier > 0.2)
                        {
                            Console.WriteLine("Test failed");
                            ret = -1;
                        }
#else
                        Console.WriteLine("XOR test ({0}, {1}) -> {2}, should be {3}, difference={4}",
                                          data.GetTrainInput((uint)i)[0],
                                          data.GetTrainInput((uint)i)[1],
                                          calc_out[0],
                                          data.GetTrainOutput((uint)i)[0],
                                          calc_out[0] - data.GetTrainOutput((uint)i)[0]);
#endif
                    }
                    Console.WriteLine("Cleaning up.");
                }
            }
            Console.ReadKey();
            return(ret);
        }
Ejemplo n.º 29
0
            public Neuron(NeuralNet net, int layer, int index, int inputWeightCount)
            {
                // Record this neuron's position within the owning network.
                Net   = net;
                Layer = layer;
                Index = index;

                // The bias and every incoming weight start as small random values.
                Bias = GetSmallRandomNumber();
                InputWeights = Enumerable.Range(0, inputWeightCount)
                                         .Select(i => GetSmallRandomNumber())
                                         .ToArray();
            }
Ejemplo n.º 30
0
        public async Task AddNeuralNet(NeuralNet neuralNet)
        {
            // Persist the given network through a short-lived database context;
            // ConfigureAwait(false) because no synchronization context is needed.
            using (var context = new ApplicationContext(_configuration["connectionString"]))
            {
                await context.NeuralNets.AddAsync(neuralNet).ConfigureAwait(false);
                await context.SaveChangesAsync().ConfigureAwait(false);
            }
        }
        // End-to-end MNIST (small) classification test: trains a dense network
        // and uses a stratified validation split to select the best iteration.
        // NOTE(review): "Validtion" in the name is a typo, but renaming a public
        // test method changes its reported identity, so it is left as-is.
        public void Classification_Neural_Net_Using_ValidtionSet_For_Selecting_The_best_Model()
        {
            #region Read Data
            // Use StreamReader(filepath) when running from filesystem
            var trainingParser = new CsvParser(() => new StringReader(Resources.mnist_small_train));
            var testParser     = new CsvParser(() => new StringReader(Resources.mnist_small_test));

            var targetName = "Class";

            var featureNames = trainingParser.EnumerateRows(c => c != targetName).First().ColumnNameToIndex.Keys.ToArray();

            // read feature matrix (training)
            var trainingObservations = trainingParser
                                       .EnumerateRows(featureNames)
                                       .ToF64Matrix();
            // read classification targets (training)
            var trainingTargets = trainingParser.EnumerateRows(targetName)
                                  .ToF64Vector();

            // read feature matrix (test)
            var testObservations = testParser
                                   .EnumerateRows(featureNames)
                                   .ToF64Matrix();
            // read classification targets (test)
            var testTargets = testParser.EnumerateRows(targetName)
                              .ToF64Vector();
            #endregion

            // transform pixel values to be between 0 and 1.
            trainingObservations.Map(p => p / 255);
            testObservations.Map(p => p / 255);

            // create training validation split
            var splitter = new StratifiedTrainingTestIndexSplitter <double>(trainingPercentage: 0.7, seed: 24);
            var split    = splitter.SplitSet(trainingObservations, trainingTargets);

            // the output layer must know the number of classes.
            var numberOfClasses = trainingTargets.Distinct().Count();

            var net = new NeuralNet();
            net.Add(new InputLayer(width: 28, height: 28, depth: 1)); // MNIST data is 28x28x1.
            net.Add(new DenseLayer(800, Activation.Relu));
            net.Add(new SoftMaxLayer(numberOfClasses));

            // using classification accuracy as error metric.
            // When using a validation set, the error metric
            // is used for selecting the best iteration based on models error on the validation set.
            var learner = new ClassificationNeuralNetLearner(net, iterations: 10, loss: new AccuracyLoss());

            var model = learner.Learn(split.TrainingSet.Observations, split.TrainingSet.Targets, //);
                                      split.TestSet.Observations, split.TestSet.Targets);        // the validation set for estimating how well the network generalises to new data.

            // Final evaluation happens on the untouched test set.
            var metric      = new TotalErrorClassificationMetric <double>();
            var predictions = model.Predict(testObservations);

            Trace.WriteLine("Test Error: " + metric.Error(testTargets, predictions));
        }
Ejemplo n.º 32
0
 public Game(NeuralNet nn, double[] weights, double[] L1bias, double[] L2bias)
 {
     // Fresh game state: height zero, all ten cups still standing.
     h = 0f;
     remainingCups = new bool[10];

     // Store the network and its parameters as-is (no copies are taken).
     this.nn = nn;
     this.weights = weights;
     this.L1bias = L1bias;
     this.L2bias = L2bias;
 }
Ejemplo n.º 33
0
    // Use this for initialization
    void Start()
    {
        //Input - 3 (r,g,b) -- Output - 1 (Black/White)
        // Builds the net from the serialized config objects; the trailing
        // commented argument list is the old explicit-parameter constructor.
        net      = new NeuralNet(config, visualisationConfig);// (3, 4, 1, 1, 0.3f, 0.8f, 100);
        dataSets = new List <DataSet>();
        // Queue up the first sample/round before rendering starts.
        Next();

        neuralNetRenderer.InitRender(net);
    }
        // End-to-end MNIST (small) classification test using a convolutional
        // network (conv -> maxpool -> dropout -> dense -> dropout -> softmax).
        public void Classification_Convolutional_Neural_Net()
        {
            #region Read Data

            // Use StreamReader(filepath) when running from filesystem
            var trainingParser = new CsvParser(() => new StringReader(Resources.mnist_small_train));
            var testParser     = new CsvParser(() => new StringReader(Resources.mnist_small_test));

            var targetName = "Class";

            var featureNames = trainingParser.EnumerateRows(c => c != targetName).First().ColumnNameToIndex.Keys.ToArray();

            // read feature matrix (training)
            var trainingObservations = trainingParser
                                       .EnumerateRows(featureNames)
                                       .ToF64Matrix();
            // read classification targets (training)
            var trainingTargets = trainingParser.EnumerateRows(targetName)
                                  .ToF64Vector();

            // read feature matrix (test)
            var testObservations = testParser
                                   .EnumerateRows(featureNames)
                                   .ToF64Matrix();
            // read classification targets (test)
            var testTargets = testParser.EnumerateRows(targetName)
                              .ToF64Vector();
            #endregion

            // transform pixel values to be between 0 and 1.
            trainingObservations.Map(p => p / 255);
            testObservations.Map(p => p / 255);

            // the output layer must know the number of classes.
            var numberOfClasses = trainingTargets.Distinct().Count();

            // define the neural net.
            var net = new NeuralNet();
            net.Add(new InputLayer(width: 28, height:  28, depth: 1)); // MNIST data is 28x28x1.
            net.Add(new Conv2DLayer(filterWidth: 5, filterHeight: 5, filterCount: 32));
            net.Add(new MaxPool2DLayer(poolWidth: 2, poolHeight: 2));
            net.Add(new DropoutLayer(0.5));
            net.Add(new DenseLayer(256, Activation.Relu));
            net.Add(new DropoutLayer(0.5));
            net.Add(new SoftMaxLayer(numberOfClasses));

            // using only 10 iteration to make the example run faster.
            // using classification accuracy as error metric. This is only used for reporting progress.
            var learner = new ClassificationNeuralNetLearner(net, iterations: 10, loss: new AccuracyLoss());
            var model   = learner.Learn(trainingObservations, trainingTargets);

            // Evaluate on the untouched test set.
            var metric      = new TotalErrorClassificationMetric <double>();
            var predictions = model.Predict(testObservations);

            Trace.WriteLine("Test Error: " + metric.Error(testTargets, predictions));
        }
Ejemplo n.º 35
0
        // Trains two fresh 2x30 networks per step setting on a tiny 3-sample
        // dataset and appends convergence statistics and weights to a log file.
        public void TestMethodDecimalAdaptive()
        {
            var inputs = new List <List <double> >()
            {
                new List <double>()
                {
                    0.5, 0.2, 0.9
                },
                new List <double>()
                {
                    0.9, 0.5, 0.2
                },
                new List <double>()
                {
                    0.2, 0.9, 0.5
                }
            };

            var expectedResults = new List <List <double> >()
            {
                new List <double>()
                {
                    0.9, 0.1, 0.1
                },
                new List <double>()
                {
                    0.1, 0.9, 0.1
                },
                new List <double>()
                {
                    0.1, 0.1, 0.9
                },
            };

            // NOTE(review): machine-specific absolute path — this test only
            // runs on one developer's machine; consider Path.GetTempPath().
            // The path is loop-invariant, so it is computed once here (the
            // original rebuilt the identical string twice per iteration).
            var path = @"C:\Users\Serban\Pictures\LeafsVeins\" + nameof(TestMethodDecimalAdaptive) + ".txt";

            for (int i = 1; i < 4; i++)
            {
                Link.Step = 5 * i;
                //  Link.RenewalFactor *= 10;

                // First run: train from scratch and log the iteration count
                // alongside the current step/renewal settings.
                var sut        = new NeuralNet(2, 30, inputs[0], expectedResults[0]);
                var iterations = sut.TrainAdaptive(inputs, expectedResults, -0.01);
                File.AppendAllText(path,
                                   " Iterations: " + iterations +
                                   " Step: " + Link.Step +
                                   " Renewal: " + Link.RenewalFactor + Environment.NewLine);
                sut.PrintWeights(path, iterations);

                // Second run with identical settings; only the weights are logged.
                sut        = new NeuralNet(2, 30, inputs[0], expectedResults[0]);
                iterations = sut.TrainAdaptive(inputs, expectedResults, -0.01);
                sut.PrintWeights(path, iterations);
            }
        }
Ejemplo n.º 36
0
    // Start is called before the first frame update
    void Start()
    {
        // Assemble a small regression network: 2 inputs, two hidden layers of
        // 8 neurons, and a squared-error regression output.
        net = new NeuralNet();
        net.Add(new InputLayer(2));
        net.Add(new DenseLayer(8));
        net.Add(new DenseLayer(8));
        net.Add(new SquaredErrorRegressionLayer());

        // Online learning: each Learn call does a single iteration on a
        // batch of one sample.
        learner = new RegressionNeuralNetLearner(net, new SquareLoss(), iterations: 1, batchSize: 1);
    }
        public void RegressionNeuralNetLearner_Constructor_Throw_On_Wrong_OutputLayerType()
        {
            // A regression learner needs a regression output layer; ending the
            // net with an SvmLayer must make the constructor throw. The
            // expected-exception attribute presumably sits on this method
            // outside the visible snippet — confirm in the full file.
            var net = new NeuralNet();

            net.Add(new InputLayer(10));
            net.Add(new DenseLayer(10));
            net.Add(new SvmLayer(10));

            // The constructor call itself is the act under test; `sut` is
            // intentionally unused.
            var sut = new RegressionNeuralNetLearner(net, new AccuracyLoss());
        }
Ejemplo n.º 38
0
        // Wires the network, optimizer and training set into the trainer and
        // mirrors the network/optimizer into the session.
        public OptimizingTrainer(NeuralNet <T> network, OptimizerBase <T> optimizer, IDataSet <T> trainingSet, OptimizingTrainerOptions options, OptimizingSession session) : base(options, session)
        {
            _network    = network;
            _optimizer  = optimizer;
            TrainingSet = trainingSet;

            // TODO: This is not very good — the session should probably obtain
            // these references itself rather than having the trainer push them in.
            session.Optimizer = optimizer;
            session.Network   = network;
        }
Ejemplo n.º 39
0
        // Trains a 2-3-1 network on the XOR problem with RPROP and bit-fail
        // stopping, prints per-sample results, then saves both a float and a
        // fixed-point copy of the network and training data.
        static void Main(string[] args)
        {
            DataType[] calc_out;
            const uint num_input = 2;
            const uint num_output = 1;
            const uint num_layers = 3;
            const uint num_neurons_hidden = 3;
            const float desired_error =  0;
            const uint max_epochs = 1000;
            const uint epochs_between_reports = 10;

            int decimal_point;

            Console.WriteLine("Creating network.");
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_neurons_hidden, num_output))
            using (TrainingData data = new TrainingData("..\\..\\..\\examples\\xor.data"))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC;

                // Stop on the number of bit-failures rather than MSE; a sample
                // counts as failed when its error exceeds BitFailLimit.
                net.TrainStopFunction = StopFunction.STOPFUNC_BIT;
                net.BitFailLimit = 0.01F;

                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_RPROP;

                // Initialise weights using Widrow-Nguyen style init from the data.
                net.InitWeights(data);

                Console.WriteLine("Training network.");
                net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

                Console.WriteLine("Testing network");
                // Keep a copy of the inputs and outputs so that we don't call TrainingData.Input
                // and TrainingData.Output multiple times causing a copy of all the data on each
                // call. An alternative is to use the Input/OutputAccessors which are fast with
                // repeated calls to get data and can be cast to arrays with the Array property
                DataType[][] input = data.Input;
                DataType[][] output = data.Output;
                for (int i = 0; i < data.TrainDataLength; i++)
                {
                    calc_out = net.Run(input[i]);
                    Console.WriteLine("XOR test ({0},{1}) -> {2}, should be {3}, difference={4}",
                                        input[i][0], input[i][1], calc_out[0], output[i][0],
                                        FannAbs(calc_out[0] - output[i][0]));
                }

                Console.WriteLine("Saving network.\n");

                net.Save("..\\..\\..\\examples\\xor_float.net");

                // SaveToFixed returns the decimal point position needed to save
                // the training data in the matching fixed-point format.
                decimal_point = net.SaveToFixed("..\\..\\..\\examples\\xor_fixed.net");
                data.SaveTrainToFixed("..\\..\\..\\examples\\xor_fixed.data", (uint)decimal_point);

                Console.ReadKey();
            }
        }
Ejemplo n.º 40
0
	public void Setup(Genome genome)
	{
		// Keep the genome and build this creature's brain from it.
		this.genome = genome;
		brain = new NeuralNet(RoddenberryGenome.NumInputs, RoddenberryGenome.NumOutputs, RoddenberryGenome.Layers, genome.genome, 0.5f);

		// Show the genome's name (minus the shared prefix) on the child
		// label, when one exists.
		var label = GetComponentInChildren<UnityEngine.UI.Text>();
		if (label == null)
		{
			return;
		}
		label.text = genome.name.Replace("Roddenberry ", "");
	}
Ejemplo n.º 41
0
        // Builds train/test sets from callbacks, dumps them via the accessor
        // classes as a sanity check, then sweeps the learning momentum from
        // 0.0 to 0.6 training a fresh network (with a progress callback) at
        // each setting.
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 96;
            const float desired_error = 0.00007F;

            using (TrainingData trainData = new TrainingData())
            using (TrainingData testData = new TrainingData())
            {
                // 374/594 records, 48 inputs, 3 outputs, filled by callbacks.
                trainData.CreateTrainFromCallback(374, 48, 3, TrainingDataCallback);
                testData.CreateTrainFromCallback(594, 48, 3, TestDataCallback);

                // Test Accessor classes
                for (int i = 0; i < trainData.TrainDataLength; i++)
                {
                    Console.Write("Input {0}: ", i);
                    for (int j = 0; j < trainData.InputCount; j++)
                    {
                        Console.Write("{0}, ", trainData.InputAccessor[i][j]);
                    }
                    Console.Write("\nOutput {0}: ", i);
                    for (int j = 0; j < trainData.OutputCount; j++)
                    {
                        Console.Write("{0}, ", trainData.OutputAccessor[i][j]);
                    }
                    Console.WriteLine("");
                }

                for (float momentum = 0.0F; momentum < 0.7F; momentum += 0.1F)
                {
                    Console.WriteLine("============= momentum = {0} =============\n", momentum);
                    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, trainData.InputCount, num_neurons_hidden, trainData.OutputCount))
                    {
                        // The callback receives "Hello!" as its user data.
                        net.SetCallback(TrainingCallback, "Hello!");

                        net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;

                        net.LearningMomentum = momentum;

                        net.TrainOnData(trainData, 20000, 500, desired_error);

                        Console.WriteLine("MSE error on train data: {0}", net.TestData(trainData));
                        Console.WriteLine("MSE error on test data: {0}", net.TestData(testData));
                    }

                }
            }
            Console.ReadKey();
        }
Ejemplo n.º 42
0
        /// <summary>
        /// Verifies full connectivity of a 2-3-2 net: every neuron is connected
        /// to every neuron in the adjacent layers, and the matching output/input
        /// connection entries are the same object on both ends.
        /// </summary>
        public void Constructor_EachNeuronInEachLayerIsConnectedToEveryNeuronInAdjacentLayers()
        {
            NeuralNet net = new NeuralNet(neuronsPerLayer: new[] { 2, 3, 2 });

            IList<Neuron> inputLayerNeurons = net.Layers[0].Neurons;
            IList<Neuron> hiddenLayerNeurons = net.Layers[1].Neurons;
            IList<Neuron> outputLayerNeurons = net.Layers[2].Neurons;

            // Each neuron in the input layer should have 0 inputs, and three outputs.
            Assert.AreEqual(0, inputLayerNeurons[0].Inputs.Count);
            Assert.AreEqual(3, inputLayerNeurons[0].Outputs.Count);
            Assert.AreEqual(0, inputLayerNeurons[1].Inputs.Count);
            Assert.AreEqual(3, inputLayerNeurons[1].Outputs.Count);

            // Each neuron in the hidden layer should have two inputs, and two outputs.
            Assert.AreEqual(2, hiddenLayerNeurons[0].Inputs.Count);
            Assert.AreEqual(2, hiddenLayerNeurons[0].Outputs.Count);
            Assert.AreEqual(2, hiddenLayerNeurons[1].Inputs.Count);
            Assert.AreEqual(2, hiddenLayerNeurons[1].Outputs.Count);

            // Each neuron in the output layer should have three inputs, and one output.
            Assert.AreEqual(3, outputLayerNeurons[0].Inputs.Count);
            Assert.AreEqual(1, outputLayerNeurons[0].Outputs.Count);
            Assert.AreEqual(3, outputLayerNeurons[1].Inputs.Count);
            Assert.AreEqual(1, outputLayerNeurons[1].Outputs.Count);

            // All the first nodes outputs from the first layer should be the first inputs into each neuron in the hidden layer.
            Assert.AreEqual(inputLayerNeurons[0].Outputs[0], hiddenLayerNeurons[0].Inputs[0]);
            Assert.AreEqual(inputLayerNeurons[0].Outputs[1], hiddenLayerNeurons[1].Inputs[0]);
            Assert.AreEqual(inputLayerNeurons[0].Outputs[2], hiddenLayerNeurons[2].Inputs[0]);

            // All the second nodes outputs from the first layer should be the second inputs into each neuron in the hidden layer.
            Assert.AreEqual(inputLayerNeurons[1].Outputs[0], hiddenLayerNeurons[0].Inputs[1]);
            Assert.AreEqual(inputLayerNeurons[1].Outputs[1], hiddenLayerNeurons[1].Inputs[1]);
            Assert.AreEqual(inputLayerNeurons[1].Outputs[2], hiddenLayerNeurons[2].Inputs[1]);

            // All the first outputs from each node in the hidden layer should be the inputs to the first node in the output layer.
            Assert.AreEqual(hiddenLayerNeurons[0].Outputs[0], outputLayerNeurons[0].Inputs[0]);
            Assert.AreEqual(hiddenLayerNeurons[1].Outputs[0], outputLayerNeurons[0].Inputs[1]);
            Assert.AreEqual(hiddenLayerNeurons[2].Outputs[0], outputLayerNeurons[0].Inputs[2]);

            // All the second outputs from each node in the hidden layer should be the inputs to the second node in the output layer.
            Assert.AreEqual(hiddenLayerNeurons[0].Outputs[1], outputLayerNeurons[1].Inputs[0]);
            Assert.AreEqual(hiddenLayerNeurons[1].Outputs[1], outputLayerNeurons[1].Inputs[1]);
            Assert.AreEqual(hiddenLayerNeurons[2].Outputs[1], outputLayerNeurons[1].Inputs[2]);
        }
Ejemplo n.º 43
0
        /// <summary>
        /// Trains <paramref name="net"/> on the data in <paramref name="filename"/>,
        /// raising the activation steepness by <paramref name="steepness_step"/>
        /// each time the error drops below <paramref name="desired_error"/>,
        /// and stopping once the steepness would exceed <paramref name="steepness_end"/>
        /// or <paramref name="max_epochs"/> epochs have run.
        /// </summary>
        private static void TrainOnSteepnessFile(NeuralNet net, string filename,
            uint max_epochs, uint epochs_between_reports,
            float desired_error, float steepness_start,
            float steepness_step, float steepness_end)
        {
            float error;
            using (TrainingData data = new TrainingData())
            {
                data.ReadTrainFromFile(filename);

                // epochs_between_reports == 0 suppresses all console reporting.
                if (epochs_between_reports != 0)
                {
                    Console.WriteLine("Max epochs {0}. Desired error: {1}", max_epochs.ToString("00000000"), desired_error.ToString("0.0000000000"));
                }

                // Start both layers at the initial steepness.
                net.ActivationSteepnessHidden = steepness_start;
                net.ActivationSteepnessOutput = steepness_start;
                for (int i = 1; i <= max_epochs; i++)
                {
                    error = net.TrainEpoch(data);

                    // Report on the first epoch, every Nth epoch, the last epoch,
                    // and whenever the target error is reached.
                    if(epochs_between_reports != 0 && (i % epochs_between_reports == 0 || i == max_epochs || i == 1 || error < desired_error))
                    {
                        Console.WriteLine("Epochs     {0}. Current error: {1}", i.ToString("00000000"), error.ToString("0.0000000000"));
                    }

                    // Target reached: bump the steepness and keep training, or
                    // stop once the steepness range is exhausted.
                    if(error < desired_error)
                    {
                        steepness_start += steepness_step;
                        if(steepness_start <= steepness_end)
                        {
                            Console.WriteLine("Steepness: {0}", steepness_start);
                            net.ActivationSteepnessHidden = steepness_start;
                            net.ActivationSteepnessOutput = steepness_start;
                        }
                        else
                        {
                            break;
                        }
                    }
                }
            }
        }
Ejemplo n.º 44
0
        /// <summary>
        /// Trains a three-layer network on the mushroom data set, evaluates it
        /// against the held-out test file, and saves the trained network.
        /// </summary>
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 32;
            const float desired_error = 0.0001F;
            const uint max_epochs = 300;
            const uint epochs_between_reports = 10;

            Console.WriteLine("Creating network.");
            using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, data.InputCount, num_neurons_hidden, data.OutputCount))
            {
                Console.WriteLine("Training network.");

                // Symmetric sigmoid in the hidden layer, plain sigmoid on output.
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID;
                net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

                Console.WriteLine("Testing network.");

                using (TrainingData testData = new TrainingData())
                {
                    testData.ReadTrainFromFile("..\\..\\..\\datasets\\mushroom.test");
                    net.ResetMSE();

                    // The difference between calling GetTrain[Input|Output] and calling
                    // the Input and Output properties is huge in terms of speed
                    for (uint sample = 0; sample < testData.TrainDataLength; sample++)
                    {
                        net.Test(testData.GetTrainInput(sample).Array, testData.GetTrainOutput(sample).Array);
                    }

                    Console.WriteLine("MSE error on test data {0}", net.MSE);

                    Console.WriteLine("Saving network.");
                    net.Save("..\\..\\..\\examples\\mushroom_float.net");

                    Console.ReadKey();
                }

            }
        }
Ejemplo n.º 45
0
        /// <summary>
        /// Trains an incremental-learning network on the robot data set,
        /// reports the MSE on the robot test file, and saves the network.
        /// </summary>
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 96;
            const float desired_error = 0.001F;

            Console.WriteLine("Creating network.");

            using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\robot.train"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, data.InputCount, num_neurons_hidden, data.OutputCount))
            using (TrainingData testData = new TrainingData())
            {
                Console.WriteLine("Training network.");

                // Incremental (online) training with a bit of momentum.
                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;
                net.LearningMomentum = 0.4F;
                net.TrainOnData(data, 3000, 10, desired_error);

                Console.WriteLine("Testing network.");
                testData.ReadTrainFromFile("..\\..\\..\\datasets\\robot.test");
                try
                {
                    net.ResetMSE();
                    for (uint sample = 0; sample < testData.TrainDataLength; sample++)
                    {
                        net.Test(testData.GetTrainInput(sample).Array, testData.GetTrainOutput(sample).Array);
                    }
                    Console.WriteLine("MSE error on test data: {0}", net.MSE);

                    Console.WriteLine("Saving network.");

                    net.Save("..\\..\\..\\datasets\\robot_float.net");
                }
                catch (Exception e)
                {
                    Console.WriteLine("Exception: {0}", e.Message);
                }
                Console.ReadKey();
            }
        }
Ejemplo n.º 46
0
        /// <summary>
        /// Classic FANN XOR example: trains a 2-3-1 fully connected network on
        /// xor.data and writes the trained net to xor_float.net.
        /// </summary>
        static void Main()
        {
            // Topology: 2 inputs, one hidden layer of 3 neurons, 1 output.
            const uint inputCount = 2;
            const uint outputCount = 1;
            const uint layerCount = 3;
            const uint hiddenNeuronCount = 3;

            // Stopping criterion and reporting cadence.
            const float targetError = 0.001F;
            const uint epochLimit = 500000;
            const uint reportInterval = 1000;

            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, layerCount, inputCount, hiddenNeuronCount, outputCount))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC;

                net.TrainOnFile("..\\..\\..\\examples\\xor.data", epochLimit, reportInterval, targetError);
                net.Save("..\\..\\..\\examples\\xor_float.net");

                Console.ReadKey();
            }
        }
Ejemplo n.º 47
0
        /// <summary>
        /// XOR example using steepness training: trains with quickprop while
        /// stepping the activation steepness, then switches to threshold
        /// activations and checks the four XOR cases on the console.
        /// </summary>
        static void Main()
        {
            const uint num_input = 2;
            const uint num_output = 1;
            const uint num_layers = 3;
            const uint num_neurons_hidden = 3;
            const float desired_error = 0.001F;
            const uint max_epochs = 500000;
            const uint epochs_between_reports = 1000;

            using (TrainingData data = new TrainingData("..\\..\\..\\examples\\xor.data"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_neurons_hidden, num_output))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC;
                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_QUICKPROP;

                // Train while raising the steepness from 1.0 to 20.0 in 0.1 steps.
                TrainOnSteepnessFile(net, "..\\..\\..\\examples\\xor.data", max_epochs, epochs_between_reports, desired_error, 1.0F, 0.1F, 20.0F);

                // Hard thresholds give crisp symmetric outputs for the final check.
                net.ActivationFunctionHidden = ActivationFunction.THRESHOLD_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.THRESHOLD_SYMMETRIC;

                for (int sample = 0; sample < data.TrainDataLength; sample++)
                {
                    DataType[] calc_out = net.Run(data.GetTrainInput((uint)sample));
                    Console.WriteLine("XOR test ({0}, {1}) -> {2}, should be {3}, difference={4}",
                                        data.InputAccessor[sample][0], data.InputAccessor[sample][1], calc_out[0], data.OutputAccessor[sample][0],
                                        FannAbs(calc_out[0] - data.OutputAccessor[sample][0]));
                }

                net.Save("..\\..\\..\\examples\\xor_float.net");

                Console.ReadKey();
            }
        }
Ejemplo n.º 48
0
        /// <summary>
        /// Classifies the text in textBox3 with a previously trained network.
        /// Loads "&lt;base&gt;.ann" (network), "&lt;base&gt;.words.dict" (word order) and
        /// "&lt;base&gt;.words.cat" (category names) where &lt;base&gt; is textBox2.Text,
        /// builds a word-frequency feature vector, runs the net, and shows the
        /// highest-scoring category in textBox4.
        /// </summary>
        private void button3_Click(object sender, EventArgs e)
        {
            // Verify all three model files exist; report what is missing.
            string status = "";
            if (!File.Exists(textBox2.Text + ".ann"))
                status += "ANN missing ";
            if (!File.Exists(textBox2.Text + ".words.cat"))
                status += "Words CAT missing ";
            if (!File.Exists(textBox2.Text + ".words.dict"))
                status += "Words DICT missing ";
            if (status != "")
                label7.Text = "Files: " + status;
            else
            {
                label7.Text = "";
                textBox4.Text = "";
                // word -> feature index, and output index -> category name.
                IDictionary<string, int> words2index = new Dictionary<string, int>();
                IDictionary<int,string> cat2index = new Dictionary<int, string>();
                StreamReader dictFileStream = null;
                StreamReader catFileStream = null;
                try
                {
                    // One word per line; line number is the feature index.
                    dictFileStream = File.OpenText(textBox2.Text + ".words.dict");
                    int i = 0;
                    while (!dictFileStream.EndOfStream)
                    {
                        words2index.Add(dictFileStream.ReadLine(), i++);
                    }
                    // One category name per line; line number is the output index.
                    catFileStream = File.OpenText(textBox2.Text + ".words.cat");
                    i = 0;
                    while (!catFileStream.EndOfStream)
                    {
                        cat2index.Add(i++, catFileStream.ReadLine());
                    }
                }
                finally
                {
                    if (dictFileStream != null)
                        dictFileStream.Close();
                    if (catFileStream != null)
                        catFileStream.Close();
                }
                // Strip everything except Latin/Cyrillic letters, whitespace and dots.
                string text = new Regex(@"[^a-zA-Zа-яА-Я\s\.]+").Replace(textBox3.Text, "");
                string[] words = split(text);

                // Build the feature vector in dictionary order. checkBox1 switches
                // between relative frequency and a capped linear weighting.
                IDictionary<string, long> freqs = countFreq(words);
                double[] args = new double[words2index.Count];
                int ctr = 0;
                foreach (var v in words2index.OrderBy(x=>x.Value).Select(x=>x.Key))
                {
                    if (freqs.ContainsKey(v))
                    {
                        if (!checkBox1.Checked)
                            args[ctr] = ((double)freqs[v]) / words2index.Count;
                        else
                        {
                            args[ctr] = Math.Min(0.1,0.01*freqs[v]);
                        }
                    }
                    ctr++;
                }
                textBox4.Text += "Args values: " + args.Aggregate(string.Empty, (s, o) => s + " " + o.ToString())+Environment.NewLine;
                NeuralNet net = new NeuralNet();

                // Load the trained network and run the feature vector through it.
                net.CreateFromFile(textBox2.Text+".ann");
                net.PrintError();
                double[] result = net.Run(args);
                net.PrintError();
                textBox4.Text += "Results array: " + result.Aggregate(string.Empty, (s, o) => s + " " + o.ToString()) + Environment.NewLine;
                // Argmax over the outputs picks the winning category.
                // NOTE(review): if result is empty, maxpos stays -1 and the
                // cat2index lookup below throws — verify the net always has outputs.
                int maxpos = -1;
                double maxval = -1;
                for(int i=0;i<result.Length;i++)
                {
                    if (result[i]>maxval)
                    {
                        maxval = result[i];
                        maxpos = i;
                    }
                }
                textBox4.Text += "Max value "+maxval+" for: " + cat2index[maxpos];
            }
        }
Ejemplo n.º 49
0
        // Test function that demonstrates usage of the fann C++ wrapper
        /// <summary>
        /// End-to-end XOR demo: builds a 2-3-1 network, trains it on xor.data
        /// with bit-fail stopping, prints the four XOR cases, and saves the
        /// network in both floating-point and fixed-point formats.
        /// </summary>
        private static void xor_test()
        {
            System.Console.WriteLine("XOR test started.");

            const float LearningRate = 0.7f;
            const uint numInput = 2;
            const uint numHidden = 3;
            const uint numOutput = 1;
            const float desired_error = 0;
            const uint max_iterations = 1000;
            const uint iterations_between_reports = 10;

            System.Console.WriteLine("Creating network.");

            NeuralNet net = new NeuralNet();

            // Layer sizes: input, hidden, output.
            List<uint> layers = new List<uint>();
            layers.Add(numInput);
            layers.Add(numHidden);
            layers.Add(numOutput);

            net.CreateStandardArray(layers.ToArray());

            net.SetLearningRate(LearningRate);

            net.SetActivationSteepnessHidden(1.0);
            net.SetActivationSteepnessOutput(1.0);

            net.SetActivationFunctionHidden(ActivationFunction.SigmoidSymmetric);
            net.SetActivationFunctionOutput(ActivationFunction.SigmoidSymmetric);

            // Stop when every output bit is within 0.01 of its target.
            net.SetTrainStopFunction(StopFunction.Bit);
            net.SetBitFailLimit(0.01f);
            // Set additional properties such as the training algorithm
            //net.SetTrainingAlgorithm(FANN::TRAIN_QuickProp);

            // Output network type and parameters
            System.Console.WriteLine("Network Type                         :  ");
            switch (net.GetNetworkType())
            {
                case NetworkType.Layer:
                    System.Console.WriteLine("LAYER");
                    break;
                case NetworkType.ShortCut:
                    System.Console.WriteLine("SHORTCUT");
                    break;
                default:
                    System.Console.WriteLine("UNKNOWN");
                    break;
            }
            net.PrintParameters();

            System.Console.WriteLine("Training network.");

            TrainingData data = new TrainingData();
            bool d = data.ReadTrainFromFile("xor.data");
            if (d)
            {
                // Initialize and train the network with the data
                net.InitWeights(data);

                System.Console.WriteLine("Max Epochs " + max_iterations + ". "
                    + "Desired Error: " + desired_error);

                // Progress callback: print the MSE at every reporting interval.
                net.Callback += (nn, train, max_epochs, epochs_between_reports, de, epochs)
                    =>
                    {
                        System.Console.WriteLine("Epochs     " + epochs + ". " + "Current Error: " + nn.GetMSE() + "\n");
                        return 0;
                    };

                net.TrainOnData(data, max_iterations,
                    iterations_between_reports, desired_error);

                System.Console.WriteLine("Testing network.");

                for (uint i = 0; i < data.TrainingDataLength; ++i)
                {
                    // Run the network on the test data
                    double calcOut = net.Run(data.Input[i])[0];

                    System.Console.WriteLine("XOR test (" + data.Input[i][0] + ", "
                         + data.Input[i][1] + ") -> " + calcOut
                         + ", should be " + data.Output[i][0] + ", "
                         + "difference = "
                         + Math.Abs(calcOut - data.Output[i][0]));
                }

                System.Console.WriteLine("Saving network.");

                // Save the network in floating point and fixed point
                net.Save("xor_float.net");
                uint decimal_point = (uint)net.SaveToFixed("xor_fixed.net");
                data.SaveTrainToFixed("xor_fixed.data", decimal_point);

                System.Console.WriteLine("XOR test completed.");

            }
            else
                Console.WriteLine("Training file not found");
        }
Ejemplo n.º 50
0
        /// <summary>
        /// Builds a FANN training file from the digit images under TrainNeural\1..9
        /// (skipping duplicate files by hash), trains a 480-190-9 network with the
        /// error target chosen in comboBox1, and saves it as NeuralNet.ann.
        /// </summary>
        private void button4_Click(object sender, EventArgs e)
        {
            int fileCount = 0;

            // Count the .png samples per digit folder to size the input/output arrays.
            // NOTE(review): @"TrainNeural\\" is a verbatim string, so the path
            // contains a double backslash; Windows tolerates it, but
            // @"TrainNeural\" was probably intended.
            for (int i = 1; i < 10; i++)
            {
                DirectoryInfo di = new DirectoryInfo(@"TrainNeural\\" + i.ToString());
                FileInfo[] bmpFiles = di.GetFiles("*.png");
                fileCount += bmpFiles.Length;
            }

            double[,] input = new double[fileCount, 480];   // 480 features per image
            double[,] output = new double[fileCount, 10];   // one-hot target (9 digits used)

            List<string> hashes = new List<string>();
            int count = 0;

            for (int i = 1; i < 10; i++)
            {
                DirectoryInfo di = new DirectoryInfo(@"TrainNeural\\" + i.ToString());
                FileInfo[] bmpFiles = di.GetFiles("*.png");

                foreach (FileInfo fi in bmpFiles)
                {
                    // Skip byte-identical images.
                    string hash = FileHashSum(fi.FullName);

                    if (hashes.Contains(hash))
                        continue;

                    hashes.Add(hash);

                    // NOTE(review): the Bitmap instances are never disposed, and it
                    // is unclear whether Binarization returns a new object or the
                    // input — confirm ImageProcessor's ownership semantics before
                    // wrapping these in using().
                    Bitmap bmp = new Bitmap(fi.FullName);

                    bmp = ImageProcessor.Binarization(bmp, ImageProcessor.OtsuThreshold(bmp));
                    ImageProcessor.GetNumericView(bmp, ref input, count);

                    // One-hot encode the digit: output column (i - 1) gets 1.
                    for (int j = 1; j < 10; j++)
                    {
                        output[count, j - 1] = (j == i) ? 1 : 0;
                    }

                    count++;
                }
            }

            if (File.Exists("TrainingData.tr"))
                File.Delete("TrainingData.tr");

            // FIX: build the training file with a StringBuilder instead of the
            // original repeated string concatenation, which was O(n^2) in the
            // number of samples. The flush-every-40-rows behavior and the exact
            // file contents are preserved.
            var trainFile = new System.Text.StringBuilder();
            trainFile.Append(count.ToString()).Append(" 480 9").Append(Environment.NewLine);

            for (int i = 0; i < count; i++)
            {
                for (int x = 0; x < 480; x++)
                {
                    trainFile.Append(input[i, x].ToString());
                    trainFile.Append(x < 479 ? " " : Environment.NewLine);
                }

                for (int x = 0; x < 9; x++)
                {
                    trainFile.Append(output[i, x].ToString());
                    trainFile.Append(x < 8 ? " " : Environment.NewLine);
                }

                // Periodically flush the buffer to disk, as the original did.
                if (i % 40 == 0)
                {
                    File.AppendAllText("TrainingData.tr", trainFile.ToString());
                    trainFile.Clear();
                }
            }

            File.AppendAllText("TrainingData.tr", trainFile.ToString());

            NeuralNet neuralNet = new NeuralNet();

            // 480 inputs, one hidden layer of 190, 9 outputs.
            uint[] layers = { 480, 190, 9 };
            neuralNet.CreateStandardArray(layers);

            neuralNet.RandomizeWeights(-0.1, 0.1);
            neuralNet.SetLearningRate(0.7f);

            TrainingData trainingData = new TrainingData();
            trainingData.ReadTrainFromFile("TrainingData.tr");

            // comboBox1 selects the desired-error stopping threshold.
            switch (comboBox1.SelectedIndex)
            {
                case 0:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.1f);
                    break;
                case 1:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.05f);
                    break;
                case 2:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.01f);
                    break;
                case 3:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.005f);
                    break;
                case 4:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.001f);
                    break;
            }

            neuralNet.Save("NeuralNet.ann");

            renewNeural = true;
        }
Ejemplo n.º 51
0
 // Use this for initialization
 // Initializes this enemy: caches the player and component references,
 // copies health/damage from the player's Experience component, and nudges
 // the enemy up one unit.
 void Start()
 {
     fitness = 0.0f;
     player = GameObject.Find ("Warrior");
     moveSpeed = 10.0f;
     rotateSpeed = 100.0f;
     animSelector = GetComponentsInChildren<OrcAnimationSelector>();
     // Enemy stats are scaled from the player's Experience component.
     hp = player.GetComponent<Experience>().enemyHealth;
     damage = player.GetComponent<Experience>().enemyDamage;
     alive = true;
     // Sibling components used for decision-making each frame.
     net = GetComponent<NeuralNet>();
     rays = GetComponent<Sensors>();
     radar = GetComponent<RangedRadar>();
     inputs = new List<double>();
     outputs = new List<double>();
     catWrath = GameObject.FindGameObjectWithTag ("catslider");
     // Lift the spawn position by one unit.
     transform.Translate (Vector3.up);
 }
Ejemplo n.º 52
0
 // FANN training-progress callback: reports the current epoch and returns 0
 // (the value FANN interprets as "keep training" — confirm against the wrapper).
 int fannProgress(NeuralNet net, TrainingData train, uint maxEpochs, uint epochsBetweenReports, float desiredError, uint epochs)
 {
     report($"Training: epoch {epochs} of {maxEpochs}");
     return 0;
 }
Ejemplo n.º 53
0
 // Training callback used to stress-test the wrapper's object lifetimes:
 // forces a full GC each report to prove nothing reachable gets collected,
 // then prints the last connection weight, the last training input, and the
 // user data string. Returns 1 — presumably "continue training"; confirm
 // against the FANN wrapper's callback contract.
 static int TrainingCallback(NeuralNet net, TrainingData data, uint maxEpochs, uint epochsBetweenReports, float desiredError, uint epochs, object userData)
 {
     System.GC.Collect(); // Make sure nothing's getting garbage-collected prematurely
     GC.WaitForPendingFinalizers();
     Console.WriteLine("Callback: Last neuron weight: {0}, Last data input: {1}, Max epochs: {2}\nEpochs between reports: {3}, Desired error: {4}, Current epoch: {5}\nGreeting: \"{6}\"",
                         net.ConnectionArray[net.TotalConnections - 1].Weight, data.InputAccessor.Get((int)data.TrainDataLength - 1, (int)data.InputCount - 1),
                         maxEpochs, epochsBetweenReports, desiredError, epochs, userData);
     return 1;
 }
Ejemplo n.º 54
0
 /// <summary>
 /// Loads the pre-trained IntellectBoard22 recognizer network from the
 /// repository's nwFiles folder, relative to the current working directory.
 /// </summary>
 public IntellectBoard22AntiCaptcha()
 {
     string annPath = Directory.GetCurrentDirectory() + "\\..\\..\\..\\RecognizerPictures\\nwFiles\\IntellectBoard22.ann";
     net = new NeuralNet();
     net.CreateFromFile(annPath);
 }
Ejemplo n.º 55
0
        /// <summary>
        /// Background job: reads recipe rows from an Access database, builds a
        /// FANN training file plus word-dictionary and category files, then
        /// trains a network on it and saves the result as "&lt;output&gt;.ann".
        /// e.Argument is a Tuple of (database path, output file path).
        /// NOTE(review): this snippet is corrupted by a scrape artifact — the
        /// lines containing "******" below have fused two statements together
        /// and do not compile as-is; recover the original from upstream before
        /// editing behavior.
        /// </summary>
        private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
        {
            words2pos = new Dictionary<string, long>();
            wordsOrder = new List<string>();
            outputValues = new Dictionary<string, long>();
            totalRows = 0;
            // Disable the UI while the job runs (must marshal to the UI thread).
            Invoke(new MethodInvoker(
                 delegate
                 {
                     button1.Enabled = false;
                     tabControl1.Enabled = false;
                 }
                 ));

            Tuple<string, string> arg = (Tuple<string, string>)e.Argument;

            OleDbConnection myOleDbConnection = null;
            OleDbCommand myOleDbCommand = null;
            OleDbDataReader myOleDbDataReader = null;
            StreamWriter outputFile = null;
            StreamWriter dictOutputFile = null;
            StreamWriter catOutputFile = null;

            try
            {
                string connectionString = "provider=Microsoft.Jet.OLEDB.4.0;data source=" + arg.Item1;
                myOleDbConnection = new OleDbConnection(connectionString);
                myOleDbCommand = myOleDbConnection.CreateCommand();

                // TOP 100
                myOleDbCommand.CommandText = "SELECT TOP " + (int)numericUpDown3.Value + " Recept, Content, ReceptDescription,DishType FROM tblMain";
                myOleDbConnection.Open();
                myOleDbDataReader = myOleDbCommand.ExecuteReader();
                int i = 0;
                // First pass: collect the vocabulary and output categories.
                while (myOleDbDataReader.Read())
                {
                    i++;
                    prepareTrainingData((string)myOleDbDataReader["Content"], (string)myOleDbDataReader["DishType"], 1, null);
                    if (i % 541 == 0)
                    {
                        // NOTE(review): garbled line — progress report fused with
                        // the second-pass query setup by the scrape.
                        report("First pass: "******"SELECT TOP " + (int)numericUpDown3.Value + " Recept, Content, ReceptDescription,DishType FROM tblMain";
                myOleDbDataReader = myOleDbCommand.ExecuteReader();
                outputFile = new StreamWriter(File.OpenWrite(arg.Item2));
                // FANN header: sample count, input width, output width.
                outputFile.WriteLine(totalRows + " " + wordsOrder.Count + " " + outputValues.Count);
                i = 0;
                // Second pass: write one training sample per recipe row.
                while (myOleDbDataReader.Read())
                {
                    i++;
                    prepareTrainingData((string)myOleDbDataReader["Content"], (string)myOleDbDataReader["DishType"], 2, outputFile);
                    if (i % 541 == 0)
                    {
                        // NOTE(review): garbled line — same scrape artifact as above.
                        report("Second pass: "******"Dict and cat dump");
                dictOutputFile = new StreamWriter(File.OpenWrite(arg.Item2 + ".words.dict"));
                foreach (string word in wordsOrder)
                {
                    dictOutputFile.WriteLine(word);
                }
                catOutputFile = new StreamWriter(File.OpenWrite(arg.Item2 + ".words.cat"));
                foreach (string val in outputValues.OrderBy(x => x.Value).Select(x => x.Key))
                {
                    catOutputFile.WriteLine(val);
                }

                report("Creating network");
                NeuralNet net = new NeuralNet();
                net.SetActivationFunctionHidden(ActivationFunction.SigmoidSymmetric);
                net.Callback += new NeuralNet.CallbackType(fannProgress);
                // Layer sizes come from the comma-separated list in textBox5.
                uint[] layers = textBox5.Text.Split(new char[] { ',' }).Select(x => UInt32.Parse(x.Trim())).ToArray();
                net.CreateStandardArray(layers);

                TrainingData data = new TrainingData();
                outputFile.Close();
                report("Reading data");
                data.ReadTrainFromFile(arg.Item2);
                report("Doing training");
                net.TrainOnData(data, (uint)numericUpDown1.Value, 10, (float)numericUpDown2.Value);

                net.Save(arg.Item2 + ".ann");
                report("Done training. Saved.");
            }
            finally
            {
                // Best-effort cleanup of DB objects and file writers.
                if (myOleDbDataReader != null)
                    myOleDbDataReader.Close();
                if (myOleDbCommand != null)
                    myOleDbCommand.Cancel();
                if (myOleDbConnection != null)
                    myOleDbConnection.Close();
                if (outputFile != null)
                    outputFile.Close();
                if (dictOutputFile != null)
                    dictOutputFile.Close();
                if (catOutputFile != null)
                    catOutputFile.Close();
            }
        }
Ejemplo n.º 56
0
    // Use this for initialization
    /// <summary>
    /// Loads saved ranged/melee weight vectors from disk, trims them to the
    /// reference network's weight count, and seeds the initial population
    /// half melee, half ranged.
    /// </summary>
    void Awake()
    {
        rangedpath = Application.dataPath+"/ranged.txt";
        meleepath = Application.dataPath+"/melee.txt";
        n = GameObject.Find ("ReferenceNet").GetComponent<NeuralNet>();
        // Total weights: (inputs+bias)*neurons + hidden-to-hidden links + (neurons+bias)*outputs.
        numWeights = (n.numInputs + 1)*n.numNeurons + (n.numNeurons + 1)*n.numNeurons*(n.numHiddenLayers - 1) + (n.numNeurons + 1)*(n.numOutputs);
        popSize = 30;
        mutationRate = 0.2;
        maxPerturbation = 0.2;
        crossoverRate = 0.7;

        population = new List<Genome>();
        weights = new List<double>();
        rangedWeights = new List<double>();
        meleeWeights = new List<double>();

        // FIX: the original leaked both TextReaders (File.OpenText was never
        // closed). Each file holds one space-separated weight line; read it
        // inside a using block so the handles are released.
        string rline;
        string mline;
        using (TextReader reader = File.OpenText (rangedpath))
        {
            rline = reader.ReadLine();
        }
        using (TextReader mreader = File.OpenText (meleepath))
        {
            mline = mreader.ReadLine();
        }

        string[] rweight = rline.Split (' ');
        string[] mweight = mline.Split (' ');
        double test;

        // Parse failures leave 'test' at 0, matching the original behavior.
        foreach(string w in rweight){
            double.TryParse (w, out test);
            rangedWeights.Add (test);
        }

        // Trim any excess entries beyond the network's weight count.
        while(rangedWeights.Count > numWeights){
            rangedWeights.RemoveAt (rangedWeights.Count - 1);
        }

        foreach(string w in mweight){
            double.TryParse (w, out test);
            meleeWeights.Add (test);
        }

        while(meleeWeights.Count > numWeights){
            meleeWeights.RemoveAt (meleeWeights.Count - 1);
        }

        // Set up the initial population with random weights
        for(int i = 0; i < popSize; i++){
            // Half melee, half ranged to start
            if(i < 15){
                initializeGenome(meleeWeights, 0);
            }
            else
                initializeGenome(rangedWeights, 1);
        }
    }
Ejemplo n.º 57
0
        /// <summary>
        /// Verifies that a minimal 1-1 net outputs sigmoid(bias + weight * input).
        /// </summary>
        public void Think_ReturnsTheCorrectOutput()
        {
            // Create the initialization data.
            // This data represents the biases and weights for every input synapse
            // for every node in every layer but the first.
            // This means we need one bias and one weight for the only node in the second layer.
            double[] initializationData = new[] { 0.5, 0.25 };

            // Setup a net with two layers of one neuron each.
            NeuralNet net = new NeuralNet(new[] { 1, 1 }, initializationData);

            // Execute the code to test.
            IEnumerable<double> output = net.Think(inputs: new[] { 0.3 });

            // If the input is 0.3, then the output of the single node in the input layer is 0.3.
            // The weight of this synapse to the output layer is 0.25, and the bias is 0.5.
            // The sum of the inputs to the output node is 0.25 * 0.3 is .075.
            // Add this number to the bias to get .575.
            // Run this number through the sigmoid activation function.
            double expectedOutputOfNet = 1 / (1 + Math.Pow(Math.E, -0.575));

            // Validate that we got the correct output.
            Assert.IsNotNull(output);
            Assert.AreEqual(1, output.Count());
            // FIX: compare floating-point results with a tolerance rather than
            // exact equality — the net may evaluate the sigmoid in a different
            // but mathematically equivalent order of operations.
            Assert.AreEqual(expectedOutputOfNet, output.First(), 1e-12);
        }
Ejemplo n.º 58
0
        public void Constructor_EachNeuronInInputLayerHasNoInputs()
        {
            // A freshly constructed net must leave every neuron in the first
            // (input) layer without any incoming synapses.
            var net = new NeuralNet(neuronsPerLayer: new[] { 3, 2, 4 });

            var inputLayer = net.Layers.First();
            foreach (var neuron in inputLayer.Neurons)
            {
                Assert.AreEqual(0, neuron.Inputs.Count);
            }
        }
Ejemplo n.º 59
0
        public void Constructor_EachNeuronInOutputLayerHasSingleOutput()
        {
            // After construction, every neuron in the final (output) layer
            // should expose exactly one non-null outgoing synapse.
            var net = new NeuralNet(neuronsPerLayer: new[] { 3, 2, 4 });

            var outputLayer = net.Layers.Last();
            foreach (var neuron in outputLayer.Neurons)
            {
                Assert.AreEqual(1, neuron.Outputs.Count);
                Assert.IsNotNull(neuron.Outputs[0]);
            }
        }
Ejemplo n.º 60
0
        public void Constructor_InitializationData_UsedCorrectlyForBiasesAndWeights()
        {
            // Biases and input weights for every node past the input layer,
            // consumed in layer order:
            //   hidden node 1 -> (bias, weight)
            //   hidden node 2 -> (bias, weight)
            //   output node   -> (bias, weight from hidden 1, weight from hidden 2)
            double[] initializationData = new[]
            {
                0.3, 0.4,
                0.5, 0.6,
                0.7, 0.8, 0.9
            };

            var net = new NeuralNet(
                neuronsPerLayer: new[] { 1, 2, 1 },
                weightsAndBiases: initializationData);

            var hiddenLayer = net.Layers[1];
            var outputLayer = net.Layers[2];

            // The input layer consumes no initialization data and has no inputs.
            Assert.AreEqual(0, net.Layers[0].Neurons[0].Inputs.Count, "Layer 1 Node 1 Input 1");

            // First node in the hidden layer.
            Assert.AreEqual(0.3, hiddenLayer.Neurons[0].Bias, "Layer 2 Node 1 bias");
            Assert.AreEqual(0.4, hiddenLayer.Neurons[0].Inputs[0].Weight, "Layer 2 Node 1 Input 1 weight");

            // Second node in the hidden layer.
            Assert.AreEqual(0.5, hiddenLayer.Neurons[1].Bias, "Layer 2 Node 2 bias");
            Assert.AreEqual(0.6, hiddenLayer.Neurons[1].Inputs[0].Weight, "Layer 2 Node 2 Input 1 weight");

            // Single node in the output layer.
            Assert.AreEqual(0.7, outputLayer.Neurons[0].Bias, "Layer 3 Node 1 bias");
            Assert.AreEqual(0.8, outputLayer.Neurons[0].Inputs[0].Weight, "Layer 3 Node 1 Input 1 weight");
            Assert.AreEqual(0.9, outputLayer.Neurons[0].Inputs[1].Weight, "Layer 3 Node 1 Input 2 weight");
        }