public double EvaluateFitness(INeuralNetwork network)
        {
            var mine = new MinesweeperBase();

            mine.Setup(new MinesweeperConfig()
            {
                BombCount = 50
            });
            int clicks = 0;
            int score  = 0;

            while (!mine.GameEnd && clicks < mine.MaxScore)
            {
                var result = network.FeedForward(mine.Grid.Cells.Select(p => p.Value).ToArray().ToDoubleArray());
                // Map the two network outputs to grid coordinates (Width is used for both axes, assuming a square grid).
                double x = result[0] * mine.Width;
                double y = result[1] * mine.Width;
                var cell = mine.Grid.Cells.FirstOrDefault(p => p.Hit((int)x, (int)y));
                if (cell != null)
                {
                    // Penalize unsuccessful clicks.
                    if (!mine.ClickOnCell(cell, false))
                    {
                        score--;
                    }
                }
                clicks++;
            }

            return(mine.Score + score);
        }
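A minimal sketch of how a fitness method like this might be driven by a genetic trainer; the MinesweeperFitnessEvaluator type and the population collection are hypothetical names used purely to illustrate the call pattern:

            // Hypothetical driver: score every candidate network and keep the fittest one.
            var evaluator = new MinesweeperFitnessEvaluator();    // assumed owner of EvaluateFitness
            INeuralNetwork best = null;
            double bestFitness = double.MinValue;

            foreach (INeuralNetwork candidate in population)       // population: IEnumerable<INeuralNetwork> (assumed)
            {
                double fitness = evaluator.EvaluateFitness(candidate);
                if (fitness > bestFitness)
                {
                    bestFitness = fitness;
                    best = candidate;
                }
            }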
Example #2
 public Simulation(INeuralNetwork neuralNetwork, bool logger = false)
 {
     this.neuralNetwork = neuralNetwork;
     this.logger        = logger;
     stopwatch          = new Stopwatch();
     Reset();
 }
        public void Test_NeuralNetworkLogic()
        {
            INeuralNetworkBuilder <double> builder = new SigmoidNeuralNetworkBuilder <GeneticTrainingNeuralNetwork>();

            builder.BuildNetwork(2, new int[] { 3 }, 1);
            INeuralNetwork <double> network = builder.InitializeNeuralNetworkWithData(
                new double[] { 1, 1 },
                null,
                null);

            // First Hidden Layer
            network.HiddenLayers.ElementAt(0)[0].Inputs.ElementAt(0).DendriteWeight.Weight = 0.8;
            network.HiddenLayers.ElementAt(0)[0].Inputs.ElementAt(1).DendriteWeight.Weight = 0.2;
            network.HiddenLayers.ElementAt(0)[1].Inputs.ElementAt(0).DendriteWeight.Weight = 0.4;
            network.HiddenLayers.ElementAt(0)[1].Inputs.ElementAt(1).DendriteWeight.Weight = 0.9;
            network.HiddenLayers.ElementAt(0)[2].Inputs.ElementAt(0).DendriteWeight.Weight = 0.3;
            network.HiddenLayers.ElementAt(0)[2].Inputs.ElementAt(1).DendriteWeight.Weight = 0.5;
            // Output neuron (one weight per hidden-layer input)
            network.Output.Neurons.ElementAt(0)[0].DendriteWeight.Weight = 0.3;
            network.Output.Neurons.ElementAt(0)[1].DendriteWeight.Weight = 0.5;
            network.Output.Neurons.ElementAt(0)[2].DendriteWeight.Weight = 0.9;
            // Run sigmoid function
            network.Pulse(null);
        }
Example #4
        private void CrearNeuralNetwork()
        {
            int neuronasOcultas  = (int)spinNeuronasOculta.Value;
            var activacionOculta = cboFuncionActivacionOculta.SelectedItem as EnumInfo <ActivationType>;
            var pesosOculta      = cboPesosOculta.SelectedItem as EnumInfo <WeightsInitializationMode>;
            var biasOculta       = cboBiasOculta.SelectedItem as EnumInfo <BiasInitializationMode>;

            var activacionSalida = cboFuncionActivacionSalida.SelectedItem as EnumInfo <ActivationType>;
            var funcionCosto     = cboFuncionCosto.SelectedItem as EnumInfo <CostFunctionType>;
            var pesosSalida      = cboPesosSalida.SelectedItem as EnumInfo <WeightsInitializationMode>;
            var biasSalida       = cboBiasSalida.SelectedItem as EnumInfo <BiasInitializationMode>;

            LayerFactory layerSalida;

            if (activacionSalida.Valor == ActivationType.Softmax)
            {
                // Softmax output layer: use the output-layer weight/bias initialization settings.
                layerSalida = NetworkLayers.Softmax(3, pesosSalida.Valor, biasSalida.Valor);
            }
            else
            {
                layerSalida = NetworkLayers.FullyConnected(3, activacionSalida.Valor, funcionCosto.Valor, pesosSalida.Valor, biasSalida.Valor);
            }

            _neuralNetwork = NetworkManager.NewSequential(TensorInfo.Linear(4),
                                                          NetworkLayers.FullyConnected(neuronasOcultas, activacionOculta.Valor, pesosOculta.Valor, biasOculta.Valor),
                                                          layerSalida);
        }
Example #5
        public void TrainOnMemory(IConcurrentMemory <TData> memory)
        {
            var trainingData = _dataBuilder.BuildDataset(memory);

            if (trainingData == null || trainingData.Count == 0)
            {
                return;
            }

            var clonedInstance = _network.Clone();
            // Train the network
            var result = NetworkManager.TrainNetwork(clonedInstance,
                                                     trainingData,
                                                     TrainingAlgorithms.AdaDelta(),
                                                     _configuration.Epochs, 0.5f,
                                                     TrackBatchProgress,
                                                     TrainingProgress);

            Console.WriteLine("\nTraining session completed, moving to next one");

            var backupName = $"backup-network-{DateTime.Now:yyyyMMdd-HH-mm-ss-fff}.modl";

            using (var backupStream = File.Create(backupName))
            {
                _network.Save(backupStream);
            }
            Console.WriteLine($"Backup model {backupName} saved");
            _network = clonedInstance;
        }
        private INeuralNetwork CreateNetwork()
        {
            INeuralNetwork neuralNet = null;

            var sizes = txtSizes.Text.Split(',').Select(s => Convert.ToInt32(s)).ToArray();

            if (rbNetwork1.Checked)
            {
                neuralNet = new Network(sizes);
            }
            else if (rbNetwork2.Checked)
            {
                var network2 = new Network2(sizes, 5.0f, 0.3f);

                neuralNet = network2;
            }

            neuralNet.NewLogMessage =
                m => this.Invoke((MethodInvoker) delegate
            {
                lbOutput.Items.Add(m);
            });

            return(neuralNet);
        }
Example #7
        public void Train(IList<InputOutput> trainingSet, INeuralNetwork nn)
        {
            Validate();

            if (nn == null)
                throw new ArgumentNullException(nameof(nn));

            var rand = RandomProvider.GetRandom(Seed);

            if (ShouldInitializeWeights)
                InitializeWeights(nn, rand);

            var validationSetFraction = GetValidationSetFraction();

            IList<InputOutput> trainingSubSet;
            IList<InputOutput> validationSubSet = null;

            if (validationSetFraction > 0)
            {
                var split = trainingSet.Shuffle(rand).Split(validationSetFraction);
                validationSubSet = split.First;
                trainingSubSet = split.Second;
            }
            else
            {
                trainingSubSet = trainingSet;
            }

            Train(trainingSubSet, validationSubSet, rand, nn);
        }
Example #8
        /*
         * Restores the structure of a neural network from a file
         */
        public static INeuralNetwork NeuralNetworkStructure(string file)
        {
            string json = ReadFile(file);

            ADReNA_API.Util.ExportImportCommon.CommonStructure stru = JsonConvert.DeserializeObject <ADReNA_API.Util.ExportImportCommon.CommonStructure>(json);
            INeuralNetwork ann = null;

            switch (stru.type)
            {
            case ExportImportCommon.AnnType.Backpropagation:
                ann = new Backpropagation(stru.inputLayerSize.Value, stru.outputLayerSize.Value, stru.hiddenLayerSizes);
                ((Backpropagation)ann).SetErrorRate(stru.error.Value);
                ((Backpropagation)ann).SetMaxIterationNumber(stru.iterationNumber.Value);
                ((Backpropagation)ann).SetLearningRate(stru.learningRate.Value);
                break;

            case ExportImportCommon.AnnType.Kohonen:
                ann = new Kohonen(stru.inputLayerSize.Value, stru.competitiveNeuronLength.Value, stru.maximumWeightRange.Value);
                ((Kohonen)ann).SetIterationNumber(stru.iterationNumber.Value);
                ((Kohonen)ann).SetLearningRate(stru.learningRate.Value);
                ((Kohonen)ann).SetNeighborhoodRadius(stru.neighborhoodRadius.Value);
                break;
            }

            return(ann);
        }
        private void btnCreateNetwork_Click(object sender, EventArgs e)
        {
            btnCreateNetwork.Enabled = false;
            UpdateControlStatus(btnCreateNetwork);

            if (mMnistLoader == null)
            {
                lblDataLoaded.Text = "Loading data...";
                UpdateControlStatus(lblDataLoaded);

                LoadData();
            }

            lblDataLoaded.Text = "Data loaded, training network...";
            UpdateControlStatus(lblDataLoaded);

            CleanUpOldNetwork();
            mNetwork = CreateNetwork();

            Task.Factory.StartNew(() =>
                                  mNetwork.SGD(
                                      mMnistLoader.TrainingData,
                                      (int)nudEpochs.Value,
                                      (int)nudBatchSize.Value,
                                      (float)nudLearningRate.Value,
                                      mMnistLoader.TestData)
                                  )
            .ContinueWith(t =>
            {
                btnCreateNetwork.Enabled = true;
                lblDataLoaded.Text       = "Training Finished!";
            }, TaskScheduler.FromCurrentSynchronizationContext());
        }
        /// <summary>
        /// Updates the synaptic weights of the artificial neural network model being trained.
        /// </summary>
        private void UpdateSynapticWeights(INeuralNetwork neuralNetwork, List <List <List <double> > > previousSynapticWeights, List <double> inputSample, List <List <double> > neuronOutputs, List <List <double> > localGradients)
        {
            for (var i = 0; i < neuralNetwork.Layers.Count; i++)
            {
                for (var j = 0; j < neuralNetwork.Layers[i].Count; j++)
                {
                    var neuron = neuralNetwork.Layers[i][j];

                    for (var k = 0; k < neuron.SynapticWeights.Count(); k++)
                    {
                        var synapticWeightChange = neuron.LearningSpeed * localGradients[i][j];

                        if (i == 0)
                        {
                            synapticWeightChange *= inputSample[k];
                        }
                        else
                        {
                            synapticWeightChange *= neuronOutputs[i - 1][k];
                        }

                        if (previousSynapticWeights != null)
                        {
                            synapticWeightChange += Momentum * previousSynapticWeights[i][j][k];
                        }

                        neuron.SynapticWeights[k] += synapticWeightChange;
                    }
                }
            }
        }
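In effect each weight receives the delta-rule update Δw = LearningSpeed · localGradient · input, where the input is the raw training sample for the first layer and the previous layer's output for deeper layers; when a weight history is supplied, Momentum times the corresponding entry of previousSynapticWeights is added on top.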
        /// <summary>
        /// Trains the given artificial neural network model.
        /// </summary>
        /// <param name="neuralNetwork">The artificial neural network model to train.</param>
        /// <param name="trainingSet">Training set of (input vector, desired output vector) pairs.</param>
        /// <param name="validationSet">Validation set of (input vector, desired output vector) pairs.</param>
        public INeuralNetwork Learn(INeuralNetwork neuralNetwork, IEnumerable <Tuple <IList <double>, IList <double> > > trainingSet, IEnumerable <Tuple <IList <double>, IList <double> > > validationSet)
        {
            Validate(neuralNetwork, trainingSet, validationSet);

            var normalizedTrainingSet   = NormilizeSet(neuralNetwork, trainingSet);
            var normalizedValidationSet = NormilizeSet(neuralNetwork, validationSet);

            var currentNetworkErrorValue = CalculateNetworkErrorValue(neuralNetwork, normalizedValidationSet);
            var minNetworkErrorValue     = currentNetworkErrorValue;
            var bestModel = (INeuralNetwork)neuralNetwork.Clone();

            for (var i = 0; i < LearningEpochMaxAmount; i++)
            {
                PerformEpoch(neuralNetwork, normalizedTrainingSet);

                currentNetworkErrorValue = CalculateNetworkErrorValue(neuralNetwork, normalizedValidationSet);

                if (currentNetworkErrorValue < minNetworkErrorValue && i + 1 >= _learningEpochMinAmount)
                {
                    minNetworkErrorValue = currentNetworkErrorValue;
                    bestModel            = (INeuralNetwork)neuralNetwork.Clone();
                }
                else
                {
                    break;
                }
            }

            return(bestModel);
        }
Example #12
 public static void Learn()
 {
     net = new Perceptron();
     net.Load(Settings.Instance.NetworkFileName);
     net.Learn(TrainingSet.Load(Settings.Instance.DataDirectoryPath));
     net.Save(Settings.Instance.NetworkFileName);
 }
Example #13
 public override void ApplyLearning(INeuralNetwork <double> source)
 {
     foreach (INeuron <double> neuron in this.Neurons)
     {
         neuron.ApplyLearning(this);
     }
 }
Example #14
        /// <summary>
        /// Creates a configuration file and the weights file.
        /// </summary>
        /// <param name="network">The network, which will be saved.</param>
        /// <param name="pathToDir">The directory where the configuration file and directory with the weights files will be created.</param>
        /// <param name="name">Name of the future .ncfg file without extension.</param>
        public void CreateNetworkFile(INeuralNetwork network, string pathToDir, string name)
        {
            var basis   = Path.Join(pathToDir, name);
            var file    = basis + ".ncfg";
            var dir     = basis + "_weights";
            var dirName = name + "_weights";

            Directory.CreateDirectory(dir);
            using (var writer = new StreamWriter(file))
            {
                writer.WriteLine($"path_to_weights = {dirName}");
                writer.WriteLine($"input_size = {network.InputSize}");
                writer.WriteLine($"layers_count = {network.LayersCount}");

                for (int i = 0; i < network.LayersCount; i++)
                {
                    var weightsFile  = $"{dirName}_{i}.wgt";
                    var currentLayer = network.Layers[i];

                    writer.WriteLine();
                    writer.WriteLine($"layer_size = {currentLayer.NeuronsCount}");
                    writer.WriteLine($"input_size = {currentLayer.InputSize}");
                    writer.WriteLine($"activation_function = {currentLayer.ActivationFunction}");
                    writer.WriteLine($"weights_file = {weightsFile}");

                    using (var weightsFileStream = new FileStream(Path.Join(dir, weightsFile), FileMode.Create))
                    {
                        _filesTable.Add(currentLayer, weightsFileStream);
                        WriteLayerWeights(currentLayer, weightsFileStream);
                    }
                }
            }
        }
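For reference, the .ncfg file written by this method has the following shape; the numbers and the activation name below are purely illustrative, only the key/value layout mirrors the WriteLine calls above:

            path_to_weights = mnist_weights
            input_size = 784
            layers_count = 2

            layer_size = 30
            input_size = 784
            activation_function = Sigmoid
            weights_file = mnist_weights_0.wgt

            layer_size = 10
            input_size = 30
            activation_function = Sigmoid
            weights_file = mnist_weights_1.wgt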
Example #15
        public void Train(IList <InputOutput> trainingSet, INeuralNetwork nn)
        {
            Validate();

            if (nn == null)
            {
                throw new ArgumentNullException(nameof(nn));
            }

            var rand = RandomProvider.GetRandom(Seed);

            if (ShouldInitializeWeights)
            {
                InitializeWeights(nn, rand);
            }

            var validationSetFraction = GetValidationSetFraction();

            IList <InputOutput> trainingSubSet;
            IList <InputOutput> validationSubSet = null;

            if (validationSetFraction > 0)
            {
                var split = trainingSet.Shuffle(rand).Split(validationSetFraction);
                validationSubSet = split.First;
                trainingSubSet   = split.Second;
            }
            else
            {
                trainingSubSet = trainingSet;
            }

            Train(trainingSubSet, validationSubSet, rand, nn);
        }
Example #16
        public Simulation CreateSimulation()
        {
            var            activationFunction = CreateActivationFunction();
            var            neuron             = CreateNeuron(activationFunction);
            INeuralNetwork neuralNetwork      = null;

            if (Network == "NeuralNetwork")
            {
                var weightInitializer = new WeightInitializer(Weights.Min, Weights.Max);
                neuralNetwork = new NeuralNetwork(neuron, Inputs, Outputs, weightInitializer, HiddenNeurons);
            }
            else if (Network == "SOMNetwork")
            {
                neuralNetwork = new SOMNetwork(neuron, Inputs, Outputs, MaxEpoch);
            }

            var simulation = new Simulation(neuralNetwork, true);

            simulation.ValidationData = ValidationData;
            simulation.ImagesDisturbanceProbability  = ImagesDisturbanceProbability;
            simulation.ImageDisturbanceMaxDifference = ImageDisturbanceMaxDifference;
            simulation.MaxEpoch = MaxEpoch;
            simulation.Config   = this;
            return(simulation);
        }
        internal static void SaveNetwork(INeuralNetwork neuralNetwork, string filePath, string fileName)
        {
            var buffer = new NetworkBuffer();

            buffer.WriteString(Global.VERSION);
            buffer.WriteInt32(neuralNetwork.Layers.Count);
            for (int i = 0; i < neuralNetwork.Layers.Count; ++i)
            {
                var layer = neuralNetwork.Layers[i];
                buffer.WriteLayer(layer);
            }

            if (!Directory.Exists(filePath))
            {
                Directory.CreateDirectory(filePath);
            }

            var        path   = $"{filePath}/{fileName}.{Global.FILE_SUFFIX}";
            FileStream stream = new FileStream(path, FileMode.OpenOrCreate);
            var        bytes  = buffer.ToBytes();

            buffer.Close();
            stream.Write(bytes, 0, bytes.Length);
            stream.Close();
        }
        /// <summary>
        /// Validates the input data: the structure of the artificial neural network and the training and validation sets.
        /// </summary>
        private static void Validate(INeuralNetwork neuralNetwork, IEnumerable <Tuple <IList <double>, IList <double> > > trainingSet, IEnumerable <Tuple <IList <double>, IList <double> > > validationSet)
        {
            if (neuralNetwork.Layers.Count() == 0)
            {
                throw new ArgumentException("Исскуственная нейронная сеть не содержит ни одного слоя нейронов.");
            }

            foreach (var trainingSample in trainingSet)
            {
                if (trainingSample.Item1.Count() != neuralNetwork.InputValueDimension)
                {
                    throw new ArgumentException($"Размерность вектора входных данных обучающей выборки не совпадает с количеством синаптических связей нейронов входного слоя. Ожидалось: {neuralNetwork.InputValueDimension}, получено: {trainingSample.Item1.Count()}.");
                }

                if (trainingSample.Item2.Count() != neuralNetwork.OutputValueDimension)
                {
                    throw new ArgumentException($"Размерность вектора ожидаемых выходных данных обучающей выборки не совпадает с количеством нейронов выходного слоя. Ожидалось: {neuralNetwork.OutputValueDimension}, получено: {trainingSample.Item2.Count()}.");
                }
            }

            foreach (var validationSample in validationSet)
            {
                if (validationSample.Item1.Count() != neuralNetwork.InputValueDimension)
                {
                    throw new ArgumentException($"Размерность вектора входных данных проверочной выборки не совпадает с количеством синаптических связей нейронов входного слоя. Ожидалось: {neuralNetwork.InputValueDimension}, получено: {validationSample.Item1.Count()}.");
                }

                if (validationSample.Item2.Count() != neuralNetwork.OutputValueDimension)
                {
                    throw new ArgumentException($"Размерность вектора ожидаемых выходных данных проверочной выборки не совпадает с количеством нейронов выходного слоя. Ожидалось: {neuralNetwork.OutputValueDimension}, получено: {validationSample.Item2.Count()}.");
                }
            }
        }
Example #19
        public INeuralNetwork Mutate(INeuralNetwork network, double mutateChance, out bool didMutate)
        {
            NeuralNetworkGene childGenes = network.GetGenes();
            bool mutated;

            didMutate = false;
            if (_config.MutateNumberOfHiddenLayers)
            {
                childGenes = TryAddLayerToNetwork(childGenes, mutateChance, out mutated);
                didMutate  = mutated;
            }
            for (int n = 0; n < childGenes.InputGene.Neurons.Count; n++)
            {
                var neuron = childGenes.InputGene.Neurons[n];
                childGenes.InputGene.Neurons[n] = TryMutateNeuron(neuron, mutateChance, out mutated);
                didMutate = didMutate || mutated;
            }

            for (int h = 0; h < childGenes.HiddenGenes.Count; h++)
            {
                if (_config.MutateNumberOfHiddenNeuronsInLayer)
                {
                    childGenes.HiddenGenes[h] = TryAddNeuronsToLayer(childGenes, h, mutateChance, out mutated);
                    didMutate = didMutate || mutated;
                }

                for (int j = 0; j < childGenes.HiddenGenes[h].Neurons.Count; j++)
                {
                    var neuron = childGenes.HiddenGenes[h].Neurons[j];
                    childGenes.HiddenGenes[h].Neurons[j] = TryMutateNeuron(neuron, mutateChance, out mutated);
                    didMutate = didMutate || mutated;
                }
            }
            return(_networkFactory.Create(childGenes));
        }
Example #20
        private static TrainingSessionResult TrainNetworkCore(
            [NotNull] INeuralNetwork network,
            [NotNull] ITrainingDataset dataset,
            [NotNull] ITrainingAlgorithmInfo algorithm,
            int epochs, float dropout,
            [CanBeNull] IProgress <BatchProgress> batchProgress,
            [CanBeNull] IProgress <TrainingProgressEventArgs> trainingProgress,
            [CanBeNull] IValidationDataset validationDataset,
            [CanBeNull] ITestDataset testDataset,
            CancellationToken token)
        {
            // Preliminary checks
            if (epochs < 1)
            {
                throw new ArgumentOutOfRangeException(nameof(epochs), "The number of epochs must be at least 1");
            }
            if (dropout < 0 || dropout >= 1)
            {
                throw new ArgumentOutOfRangeException(nameof(dropout), "The dropout probability is invalid");
            }

            // Start the training
            return(NetworkTrainer.TrainNetwork(
                       network as SequentialNetwork ?? throw new ArgumentException("The input network instance isn't valid", nameof(network)),
                       dataset as BatchesCollection ?? throw new ArgumentException("The input dataset instance isn't valid", nameof(dataset)),
                       epochs, dropout, algorithm, batchProgress, trainingProgress,
                       validationDataset as ValidationDataset,
                       testDataset as TestDataset,
                       token));
        }
 public async Task StoreNetworkAsync(INeuralNetwork network, double eval)
 {
     var networkParseFormat = new ParseObject(_networkVersion);
     networkParseFormat["jsonNetwork"] = JsonConvert.SerializeObject(network.GetGenes());
     networkParseFormat["eval"] = eval;
     await networkParseFormat.SaveAsync();
 }
Example #22
        public RNA(int capas)
        {
            // Define the network type.
            BackPropNetworkFactory factory = new BackPropNetworkFactory();

            // This ArrayList holds the number of neurons in each layer
            ArrayList layers = new ArrayList();

            // Number of neurons in the first layer (input layer)
            layers.Add(36);

            int i;

            for (i = 0; i < capas; i++)
            {
                // One 36-neuron entry per hidden layer
                layers.Add(36);
            }

            // Number of neurons in the last layer (output layer)
            layers.Add(9);

            // Create the network via the factory pattern
            network = factory.CreateNetwork(layers);
        }
Example #23
        public static double GetAccuracy(List <TrainingElement> validationData, INeuralNetwork neuralNetwork)
        {
            var correctCount = 0;

            for (int i = 0; i < validationData.Count; i++)
            {
                var validationElement = validationData[i];
                var outputs           = neuralNetwork.Run(validationElement.Inputs);
                var isCorrect         = true;

                for (int j = 0; j < outputs.Length; j++)
                {
                    var outputValue = outputs[j] < 0.5D ? 0 : 1;
                    if (outputValue != validationElement.ExpectedOutputs[j])
                    {
                        isCorrect = false;
                        break;
                    }
                }

                if (isCorrect)
                {
                    correctCount++;
                }
            }

            return((((double)correctCount) / ((double)validationData.Count)) * 100D);
        }
Example #24
 /// <summary>
 /// Initializes a new instance of the <see cref="GenerateImagesForm" /> class.
 /// </summary>
 /// <param name="network">The neural network to use for generating the images.</param>
 /// <param name="imageWidth">The width of the images in the training/testing data.</param>
 /// <param name="imageHeight">The height of the images in the training/testing data.</param>
 public GenerateImagesForm(INeuralNetwork network, int imageWidth, int imageHeight) : this()
 {
     this.network             = network;
     this.layerUpDown.Maximum = this.network.LayerCount - 1;
     this.inputImageWidth     = imageWidth;
     this.inputImageHeight    = imageHeight;
 }
Example #25
        internal INeuralNetwork mate(INeuralNetwork mother, INeuralNetwork father)
        {
            NeuralNetworkGene motherGenes      = mother.GetGenes();
            NeuralNetworkGene childFatherGenes = father.GetGenes();
            Random            random           = new Random();

            for (int n = 0; n < childFatherGenes.InputGene.Neurons.Count; n++)
            {
                var neuron       = childFatherGenes.InputGene.Neurons[n];
                var motherNeuron = motherGenes.InputGene.Neurons[n];
                childFatherGenes.InputGene.Neurons[n] = BreedNeuron(neuron, motherNeuron, random);
            }

            if (childFatherGenes.HiddenGenes.Count >= motherGenes.HiddenGenes.Count)
            {
                childFatherGenes.HiddenGenes = MateHiddenLayers(childFatherGenes.HiddenGenes, motherGenes.HiddenGenes, random);
            }
            else
            {
                childFatherGenes.HiddenGenes = MateHiddenLayers(motherGenes.HiddenGenes, childFatherGenes.HiddenGenes, random);
            }

            for (int n = 0; n < childFatherGenes.OutputGene.Neurons.Count; n++)
            {
                var neuron       = childFatherGenes.OutputGene.Neurons[n];
                var motherNeuron = motherGenes.OutputGene.Neurons[n];
                childFatherGenes.OutputGene.Neurons[n] = BreedNeuron(neuron, motherNeuron, random);
            }

            INeuralNetwork child = _networkFactory.Create(childFatherGenes);

            return(child);
        }
Example #26
        /*
         * Serializes the structure of a neural network to a file
         */
        public static void NeuralNetworkStructure(INeuralNetwork net, string file)
        {
            string json = "";

            if (net is Kohonen)
            {
                ADReNA_API.Util.ExportImportCommon.CommonStructure structure = new ExportImportCommon.CommonStructure();
                structure.type                    = ExportImportCommon.AnnType.Kohonen;
                structure.inputLayerSize          = ((Kohonen)net).GetInputLayerSize();
                structure.competitiveNeuronLength = ((Kohonen)net).GetCompetitiveLayerLength();
                structure.maximumWeightRange      = ((Kohonen)net).GetMaximumWeightRange();
                structure.iterationNumber         = ((Kohonen)net).GetIterationNumber();
                structure.learningRate            = ((Kohonen)net).GetLearningRate();
                structure.neighborhoodRadius      = ((Kohonen)net).GetNeighborhoodRadius();

                json = JsonConvert.SerializeObject(structure);
            }
            else //Backpropagation
            {
                ADReNA_API.Util.ExportImportCommon.CommonStructure structure = new ExportImportCommon.CommonStructure();
                structure.type             = ExportImportCommon.AnnType.Backpropagation;
                structure.inputLayerSize   = ((Backpropagation)net).GetInputLayerSize();
                structure.outputLayerSize  = ((Backpropagation)net).GetOutputLayerSize();
                structure.hiddenLayerSizes = ((Backpropagation)net).GetHiddenLayerSizes();
                structure.error            = ((Backpropagation)net).GetErrorRate();
                structure.iterationNumber  = ((Backpropagation)net).GetMaxIterationNumber();
                structure.learningRate     = ((Backpropagation)net).GetLearningRate();

                json = JsonConvert.SerializeObject(structure);
            }

            WriteFile(json, file);
        }
Example #27
 public TrainTests()
 {
     _mockTrainer = GetMockTrainer();
     _trainingSet = GetTrainingSet();
     _nn          = GetNeuralNetwork();
     MockRandom.SetUp();
 }
Example #28
        public override void Train(IList<InputOutput> trainingSet,
            IList<InputOutput> validationSet,
            IRandomGenerator rand,
            INeuralNetwork nn)
        {
            var prevWeightGradients = nn.Weights.DeepClone();

            foreach (var gradSet in prevWeightGradients)
            {
                for (var j = 0; j < gradSet.Length; j++)
                    gradSet[j] = 0;
            }

            for (var s = 0; s < NumEpochs; s++)
            {
                var batch = GetBatch(trainingSet, BatchSize, rand);

                var gradients = nn.Weights.DeepCloneToZeros();

                for (var j = 0; j < BatchSize; j++)
                {
                    gradients.AddInPlace(
                        nn.CalculateGradients(batch[j].Input.AddRelativeNoise(MaxRelativeNoise, rand), batch[j].Output));
                }

                gradients.MultiplyInPlace(1 / ((double)BatchSize));

                AdjustWeights(nn, gradients, prevWeightGradients);
                gradients.DeepCopyTo(prevWeightGradients);
            }
        }
Example #29
        public void Learn(INeuralNetwork net, ILearningSequence<ISupervisedLearningVector> seq)
        {
            LearningStory = new List<NeuronState>();
            bool isLearned = true;

            do
            {
                StoreState(net);
                isLearned = true;
                IEnumerator<ISupervisedLearningVector> en = seq.Vectors.GetEnumerator();
                while (en.MoveNext())
                {
                    net.SetInput(en.Current.Data);

                    net.Compute();

                    double delta = en.Current.CorrectOutput - net[0];
                    if (delta != 0.0)
                    {
                        isLearned = false;
                        CorrectWeights(net, delta > 0 ? 1.0 : -1.0);
                    }

                }
                seq.Shuffle();

            } while (!isLearned);
            StoreState(net);
        }
 private void Mutate(INeuralNetwork network)
 {
     for (var i = 0; i < network.Weights.Length; i++)
     {
         for (var i1 = 0; i1 < network.Weights[i].Length; i1++)
         {
             var weights = network.Weights;
             var option  = _random.Next(0, 100);
             //crossover
             if (option >= 10 && option < 50)
             {
                 weights[i][i1] *= _random.NextDouble() * 2 - 1;
             }
             //mutate
             else if (option < 10)
             {
                 weights[i][i1] = _random.NextDouble() * 2 - 1;
             }
             //invert
             else if (option >= 80)
             {
                 weights[i][i1] = -weights[i][i1];
             }
         }
     }
 }
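Given the thresholds above, each weight has roughly a 10% chance of being replaced by a fresh random value in [-1, 1), a 40% chance of being scaled by such a value (labelled "crossover" here), a 20% chance of having its sign flipped, and about a 30% chance of being left unchanged.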
Example #31
        public INeuralNetwork Mutate(INeuralNetwork network, double mutateChance, out bool didMutate)
        {
            NeuralNetworkGene childGenes = network.GetGenes();
            bool mutated;
            didMutate = false;
            if (_config.MutateNumberOfHiddenLayers)
            {
                childGenes = TryAddLayerToNetwork(childGenes, mutateChance, out mutated);
                didMutate = mutated;

            }
            for (int n = 0; n < childGenes.InputGene.Neurons.Count; n++)
            {
                var neuron = childGenes.InputGene.Neurons[n];
                childGenes.InputGene.Neurons[n] = TryMutateNeuron(neuron, mutateChance, out mutated);
                didMutate = didMutate || mutated;
            }

            for (int h = 0; h < childGenes.HiddenGenes.Count; h++)
            {
                if (_config.MutateNumberOfHiddenNeuronsInLayer)
                {
                    childGenes.HiddenGenes[h] = TryAddNeuronsToLayer(childGenes, h, mutateChance, out mutated);
                    didMutate = didMutate || mutated;
                }

                for (int j = 0; j < childGenes.HiddenGenes[h].Neurons.Count; j++)
                {
                    var neuron = childGenes.HiddenGenes[h].Neurons[j];
                    childGenes.HiddenGenes[h].Neurons[j] = TryMutateNeuron(neuron, mutateChance, out mutated);
                    didMutate = didMutate || mutated;
                }
            }
            return _networkFactory.Create(childGenes);
        }
Example #32
        public static async Task Main()
        {
            // Create the network
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image <Alpha8>(28, 28),
                                                                  NetworkLayers.Convolutional((5, 5), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  NetworkLayers.FullyConnected(100, ActivationType.LeCunTanh),
                                                                  NetworkLayers.Softmax(10));

            // Prepare the dataset
            ITrainingDataset trainingData = await Mnist.GetTrainingDatasetAsync(100); // Batches of 100 samples

            ITestDataset testData = await Mnist.GetTestDatasetAsync(p => Printf($"Epoch {p.Iteration}, cost: {p.Result.Cost}, accuracy: {p.Result.Accuracy}"));

            if (trainingData == null || testData == null)
            {
                Printf("Error downloading the datasets");
                Console.ReadKey();
                return;
            }

            // Train the network
            TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network,
                                                                                  trainingData,
                                                                                  TrainingAlgorithms.AdaDelta(),
                                                                                  60, 0.5f,
                                                                                  TrackBatchProgress,
                                                                                  testDataset : testData);

            Printf($"Stop reason: {result.StopReason}, elapsed time: {result.TrainingTime}");
            Console.ReadKey();
        }
 public DetectorViewModel(ref INeuralNetwork neuralNetwork, Action <string> statusMessageUpdater)
 {
     NeuralNetwork        = neuralNetwork;
     LoadImageCommand     = new RelayCommand(OnLoadImageCommand);
     StatusMessageUpdater = statusMessageUpdater;
     NetworkName          = neuralNetwork.GetNetworkName();
 }
Example #34
 public TrainTests()
 {
     _mockTrainer = GetMockTrainer();
     _trainingSet = GetTrainingSet();
     _nn = GetNeuralNetwork();
     MockRandom.SetUp();
 }
        public RobotArm()
        {
            _rand = new Random();
            _normalizationProvider = new RobotArmNormalizationProvider(InputLength, OutputLength);
            _neuralNetwork = new NeuralNetwork(InputLength, OutputLength);

            CacheArmAngles();
        }
Example #36
        public async Task StoreNetworkAsync(INeuralNetwork network, double eval)
        {
            var networkParseFormat = new ParseObject(_networkVersion);

            networkParseFormat["jsonNetwork"] = JsonConvert.SerializeObject(network.GetGenes());
            networkParseFormat["eval"]        = eval;
            await networkParseFormat.SaveAsync();
        }
Example #37
 public override void Pulse(INeuralNetwork <double> source)
 {
     // Set Neuron Values
     foreach (INeuron <double> neuron in this.Neurons)
     {
         neuron.Pulse(this);
     }
 }
Example #38
 public Generation(IEnumerable <INeuralNetwork> neuralNetworks, INeuralNetwork best, INeuralNetwork worst, INeuralNetwork average, int generationIndex)
 {
     NeuralNetworks  = neuralNetworks;
     Best            = best;
     Worst           = worst;
     Average         = average;
     GenerationIndex = generationIndex;
 }
Example #39
 public SentenceLearner(WordVectors vectors, Sentences sentences, ContextMaps contextMaps)
 {
     _vectors = vectors;
     _sentences = sentences;
     _network = NeuralNetworkFactory.CreateNeuralNetwork(
             WordVector.VectorLength + MorphoSyntacticContext.VectorLength, MorphoSyntacticContext.VectorLength, WordVector.VectorLength);
     _contextMaps = contextMaps;
     MinError = 0.01;
 }
Example #40
        private void CorrectWeights(INeuralNetwork net, double delta)
        {
            IEnumerator<INeuralConnection> en = net.Layers[1][0].NeuralInputs.GetEnumerator();

            while (en.MoveNext())
            {
                en.Current.Weight = en.Current.Weight + alpha * delta * en.Current.Axon.Signal;
            }
        }
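This is the standard perceptron correction: every input weight of the single output neuron (net.Layers[1][0]) is nudged by alpha · delta · signal, where the signal is the value on the incoming connection's axon and delta is supplied by the caller (for example ±1 in the Learn loop of Example #29).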
 public TrainingSession(INeuralNetwork nn, IEvaluatable evaluatable, int sessionNumber, bool isIdempotent = true)
 {
     NeuralNet = nn;
     _evaluatable = evaluatable;
     _sessionNumber = sessionNumber;
     _hasStoredSessionEval = false;
     _sessionEval = 0;
     _isIdempotent = isIdempotent;
 }
 public string SaveNeuralNetwork(INeuralNetwork network, double networkEvaluation, int epoch)
 {
     var genes = network.GetGenes();
     var json = JsonConvert.SerializeObject(genes);
     var minimized = MinifyJson(json);
     var filename = string.Format("\\network_eval_{0}_epoch_{1}_date_{2}.json", networkEvaluation, epoch, DateTime.Now.Ticks);
     File.WriteAllText(_directory + filename, minimized);
     return filename;
 }
Example #43
        //========================= Misc public helpers =========================
        public static void InitializeWeights(INeuralNetwork nn, IRandomGenerator rand)
        {
            var weights = nn.Weights;

            foreach (var weightsSubList in weights)
            {
                for (int i = 0; i < weightsSubList.Length; i++)
                    weightsSubList[i] = rand.NextDouble() - 0.5;
            }
        }
Example #44
 public virtual double Train(INeuralNetwork network, MorphoSyntacticContext context)
 {
     var error = 0.0;
     var denominator = 0.0;
     foreach (var contained in ChildActions)
     {
         denominator += 1.0;
         error += contained.Train(network, context);
     }
     return (denominator > 0.0 ? error/denominator : 0.0);
 }
Example #45
        public static double GetError(INeuralNetwork nn, IList<InputOutput> testSet)
        {
            var error = 0.0;

            foreach (var inputOutput in testSet)
            {
                var result = nn.FeedForward(inputOutput.Input);
                error += ErrorCalculations.CrossEntropyError(inputOutput.Output, result.Output);
            }

            return error / testSet.Count;
        }
Example #46
        public static double GetAccuracy(INeuralNetwork nn, IList<InputOutput> testSet)
        {
            var numHits = 0;

            foreach (var inputOutput in testSet)
            {
                var expected = inputOutput.Output.MaxIndex();
                var actual = nn.FeedForward(inputOutput.Input).Output.MaxIndex();

                numHits += expected == actual ? 1 : 0;
            }

            return ((double)numHits) / testSet.Count;
        }
Example #47
        public RNA()
        {
            BackPropNetworkFactory factory = new BackPropNetworkFactory();

            // This ArrayList holds the number of neurons in each layer
            ArrayList layers = new ArrayList();

            // Number of neurons in the first layer (input layer)
            layers.Add(36);
            // Number of neurons in the hidden layer
            layers.Add(36);
            // Number of neurons in the last layer (output layer)
            layers.Add(9);

            // Create the network via the factory pattern
            network = factory.CreateNetwork(layers);
        }
 public async Task StoreNetworkAsync(INeuralNetwork network, double eval)
 {
     using (var client = new HttpClient())
     {
         // Replace this with the API key for the web service
         client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
         var networkEndpoint = _baseUrl + "/network";
         var genesJson = JsonConvert.SerializeObject(network.GetGenes());
         var message = new NodeJsMessage
         {
             eval = eval,
             version = _networkVersion,
             networkGenes = genesJson
         };
         await client.PostAsJsonAsync(networkEndpoint, message);
     }
 }
Example #49
        public void AdjustWeights(INeuralNetwork nn, double[][] weightGradients, double[][] prevWeightGradients)
        {
            var weights = nn.Weights;

            for (var i = 0; i < weightGradients.Length; i++)
            {
                var gradientSubList = weightGradients[i];
                var weightSubList = weights[i];

                for (var j = 0; j < gradientSubList.Length; j++)
                {
                    var prevWeight = weightSubList[j];
                    var fullGradient = gradientSubList[j] + QuadraticRegularization * prevWeight +
                                        Momentum * prevWeightGradients[i][j];
                    weightSubList[j] = prevWeight - LearningRate * fullGradient;
                }
            }
        }
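The loop above implements gradient descent with momentum and L2 (quadratic) regularization: each weight is updated as w ← w − LearningRate · (gradient + QuadraticRegularization · w + Momentum · previousGradient).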
Example #50
        public override double Train(INeuralNetwork network, MorphoSyntacticContext context)
        {
            if (_inputArray == null && _wordVector != null)
            {
                _inputArray = new double[_wordVector.Elements.Length + context.Elements.Length];
                _wordVector.Elements.CopyTo(_inputArray, 0);
            }
            if (_inputArray == null) return base.Train(network, context);

            context.Elements.CopyTo(_inputArray, WordVector.VectorLength);

            var example = new Example {Input = _inputArray, ExpectedResult = _outputArray};
            network.Theta.LoadFrom(_weights);
            var error = network.Train(example, 0.01);
            network.Theta.SaveTo(ref _weights);

            var childContext = new MorphoSyntacticContext(network.HiddenLayer);
            error = (error + base.Train(network, childContext)/2.0);
            return error;
        }
Example #51
        public RNA()
        {
            BackPropNetworkFactory factory = new BackPropNetworkFactory();

            // This ArrayList holds the number of neurons in each layer
            ArrayList layers = new ArrayList();

            // Number of neurons in the first layer (input layer)
            layers.Add(36);
            // Number of neurons in the hidden layer
            layers.Add(36);
            // Number of neurons in the last layer (output layer)
            layers.Add(9);

            // Create the network via the factory pattern
            network = factory.CreateNetwork(layers);

            // Initialize the file used to log the Delta and Bias variations on each iteration.
            this.initializeLogFiles();
        }
Example #52
        public void Learn(INeuralNetwork net, ILearningSequence<ISupervisedLearningVector> seq)
        {
            LearningStory = new List<NeuronState>();

            bool isLearned = true;
            int counter = 0;
            do
            {
                StoreState(net);
                isLearned = true;
                IEnumerator<ISupervisedLearningVector> en = seq.Vectors.GetEnumerator();
                while (en.MoveNext())
                {
                    net.SetInput(en.Current.Data);
                    net.Compute();
                    CorrectWeights(net, en.Current.CorrectOutput - net.Layers[1][0].Potential);
                }

                // Re-enumerate the sequence to verify convergence (Reset() is not supported by all enumerators).
                en = seq.Vectors.GetEnumerator();

                while (en.MoveNext())
                {
                    net.SetInput(en.Current.Data);

                    net.Compute();

                    if (Math.Abs(en.Current.CorrectOutput - net[0]) > Epsilon)
                    {
                        isLearned = false;
                        break;
                    }
                }
                seq.Shuffle();
                counter++;
            } while (!isLearned && counter < EpochLimit);
            StoreState(net);
            Failed = !isLearned;
        }
        /// <summary>
        /// BrainNet network object to zip stream
        /// </summary>
        /// <param name="network">The network.</param>
        /// <param name="zip">The zip.</param>
        /// <param name="fileName">Name of the file.</param>
        public static void NetworkToZip(INeuralNetwork network, ZipOutputStream zip, string fileName)
        {
            // in this case we have to use a temp file
            string tempFileName = System.IO.Path.GetTempFileName();
            NetworkSerializer serializer = new NetworkSerializer();
            serializer.SaveNetwork(tempFileName, network);

            byte[] buffer = File.ReadAllBytes(tempFileName);
            PutZipEntry(zip, buffer, fileName);

            // cleans up
            File.Delete(tempFileName);
        }
 public void StoreNetwork(INeuralNetwork network, double eval)
 {
     policy.Execute(() =>
     {
         var container = GetBlobContainer();
         var topEval = GetTopEval(container);
         if (topEval >= eval)
         {
             return;
         }
         var serialized = JsonConvert.SerializeObject(network.GetGenes());
         UploadToBlob(container, _evalDirectory + "/" + _version + "/" + eval, serialized);
     });
 }
 public GameEvaluation(Game game, INeuralNetwork neuralNet)
 {
     _game = game;
     _neuralNet = neuralNet;
 }
 public IEvaluatable Create(INeuralNetwork neuralNetwork)
 {
     return new GameEvaluation(new Game(10, 10, 300), neuralNetwork);
 }
Example #57
 private void StoreState(INeuralNetwork net)
 {
     NeuronState state = new NeuronState();
     INeuron n = net.Layers[1][0];
     state.Inputs = new double[n.NeuralInputs.Count];
     state.Weights = new double[n.NeuralInputs.Count];
     int i = 0;
     foreach (INeuralConnection ni in n.NeuralInputs)
     {
         state.Weights[i] = ni.Weight;
         state.Inputs[i] = ni.Signal;
         i++;
     }
     state.Output = net[0];
     LearningStory.Add(state);
 }
 public async Task StoreNetworkAsync(INeuralNetwork network, double eval)
 {
     await policy.ExecuteAsync(async () =>
     {
         var container = GetBlobContainer();
         var topEval = GetTopEval(container);
         if (topEval >= eval)
         {
             return;
         }
         var serialized = JsonConvert.SerializeObject(network.GetGenes());
         await UploadToBlobAsync(container, _evalDirectory + "/" + _version + "/" + eval, serialized);
     });
 }
Example #59
 //========================= Inheritance contract =========================
 public abstract void Train(IList<InputOutput> trainingSet, 
     IList<InputOutput> validationSet,
     IRandomGenerator rand,
     INeuralNetwork nn);
Example #60
        //================= Private Helpers =======================
        private Mock<BaseTrainer> GetMockTrainer()
        {
            var mock = new Mock<BaseTrainer>();
            mock.Setup(t => t.Train(It.IsAny<IList<InputOutput>>(), 
                                    It.IsAny<IList<InputOutput>>(), 
                                    It.IsAny<IRandomGenerator>(),
                                    It.IsAny<INeuralNetwork>()))
                .Callback((IList<InputOutput> trainingSubSet,
                           IList<InputOutput> validationSubSet,
                           IRandomGenerator rand,
                           INeuralNetwork nn) =>
                {
                    _trainingSubSet = trainingSubSet;
                    _validationSubSet = validationSubSet;
                    _trainedNeuralNet = nn;
                });
            mock.Setup(t => t.Validate());
            mock.Setup(t => t.GetValidationSetFraction()).Returns(0);

            return mock;
        }