Example #1
 internal static int Validate(Network network, double[] trainClasses, List<int> classes, double[][] input, double[][] output)
 {
     int result = 0;
     double[] tmp;
     for (int i = 0; i < input.Length; i++)
     {
         tmp = network.Compute(input[i]);
         if (trainClasses[i] == classes[Common.Max(tmp)])
             result++;
     }
     return result;
 }
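 // Usage sketch (not from the original source, assumed to live alongside Validate): Validate
 // returns the number of correctly classified rows, so accuracy follows by dividing by the row
 // count. Assumes a trained AForge-style Network and that Common.Max returns the index of the
 // largest output.
 internal static double ValidationAccuracy(Network network, double[] trainClasses, List<int> classes, double[][] input, double[][] output)
 {
     int correct = Validate(network, trainClasses, classes, input, output);
     return 100.0 * correct / input.Length;
 }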
        public Subnet(Network network)
        {
            _ID++;
            this.ID = _ID;
            this.network = network;
            this.topology = new int[this.network.Layers.Length];

            for (int layer = 0; layer < this.network.Layers.Length; layer++)
            {
                topology[layer] = this.network.Layers[layer].Neurons.Length;
            }
        }
 //Number of connections (weights) in the network
 public static int getCountOfWeights(Network network)
 {
     int i = 0;
     for (int layer = 0; layer < network.Layers.Length; layer++)
     {
         for (int neuron = 0; neuron < network.Layers[layer].Neurons.Length; neuron++)
         {
             for (int weight = 0; weight < network.Layers[layer].Neurons[neuron].Weights.Length; weight++)
             {
                 i++;
             }
         }
     }
     return i;
 }
 //Initializes a temporary array used to support disabling and re-enabling neurons and connections
 public static double[][][][] emptyWeightsArray(Network network)
 {
     double[][][][] tempWeights = new double[network.Layers.Length][][][];
     for (int i = 0; i < network.Layers.Length; i++)
     {
         tempWeights[i] = new double[network.Layers[i].Neurons.Length][][];
         for (int j = 0; j < network.Layers[i].Neurons.Length; j++)
         {
             tempWeights[i][j] = new double[network.Layers[i].Neurons[j].Weights.Length][];
             for (int k = 0; k < network.Layers[i].Neurons[j].Weights.Length; k++)
             {
                 tempWeights[i][j][k] = new double[1];
             }
         }
     }
     return tempWeights;
 }
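 // Hedged sketch (an assumption, not taken from the original code): one way the temporary array
 // above could support turning a connection off and on is to stash the weight in the
 // single-element slot before zeroing it, and copy it back to re-enable the connection.
 public static void disableWeight(Network network, double[][][][] tempWeights, int layer, int neuron, int weight)
 {
     tempWeights[layer][neuron][weight][0] = network.Layers[layer].Neurons[neuron].Weights[weight];
     network.Layers[layer].Neurons[neuron].Weights[weight] = 0;
 }
 public static void enableWeight(Network network, double[][][][] tempWeights, int layer, int neuron, int weight)
 {
     network.Layers[layer].Neurons[neuron].Weights[weight] = tempWeights[layer][neuron][weight][0];
 }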
Example #5
    public NetworkPorts(Neural.Network network,
                        IDictionary <int, IReceptiveField> upperNeurons,
                        IDictionary <int, IReceptiveField> lowerNeurons,
                        IDictionary <int, IReceptiveField> positionNeurons,
                        IDictionary <int, IReceptiveField> speedNeurons
                        )
    {
        this.network = network;

        this.input  = new double[network.NeuronCount];
        this.output = new double[network.NeuronCount];

        UpperTheta = new NetworkInputPort(input, upperNeurons);
        LowerTheta = new NetworkInputPort(input, lowerNeurons);
        Position   = new NetworkInputPort(input, positionNeurons);

        Speed = new NetworkSumOutputPort(output, speedNeurons);
    }
        //Gets all connections (weights) available for attaching subnet inputs
        public static List<String> getAvailableWeights(Network net)
        {
            List<string> availableNeurons = new List<String>();

            //add each weight as an available connection point for attaching subnet inputs
            for (int layer = 0; layer < net.Layers.Length; layer++)
            {
                for (int neuron = 0; neuron < net.Layers[layer].Neurons.Length; neuron++)
                {
                    for (int weight = 0; weight < net.Layers[layer].Neurons[neuron].Weights.Length; weight++)
                    {
                        availableNeurons.Add(layer.ToString() + ":" + neuron.ToString() + ":" + weight.ToString());
                    }
                }
            }

            return availableNeurons;
        }
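        // Usage sketch (illustrative, not from the original source): each "layer:neuron:weight"
        // key returned above can be split back into indices to look up the weight itself,
        // mirroring what sortByModuleAsc does further below.
        public static double getWeightByKey(Network net, string key)
        {
            String[] split = key.Split(':');
            int layer = Int32.Parse(split[0]);
            int neuron = Int32.Parse(split[1]);
            int weight = Int32.Parse(split[2]);
            return net.Layers[layer].Neurons[neuron].Weights[weight];
        }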
        //Runs network testing against the desired class
        public static double testing(Network network, double[,] data, int[] classes, List<int> classesList)
        {
            double[] res;
            int colCountData = network.InputsCount;
            double[] input = new double[colCountData];
            double validate = 0.0;
            double testQuality = 0.0;

            for (int count = 0; count < data.GetLength(0) - 1; count++)
            {
                try
                {
                    //gather inputs for compute, n-1 inputs
                    for (int i = 0; i < colCountData ; i++)
                    {
                        input[i] = data[count, i];
                    }
                    res = network.Compute(input);
                    double output = classesList[ANNUtils.max(res)];
                    double value = Math.Abs(classes[count] - output);

                    validate += value;
                }
                catch (Exception e)
                {
                    MessageBox.Show("Ошибка тестирования сети." + e.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    break;
                }

            }
            testQuality = (1 - (validate / data.GetLength(0))) * 100;

            return testQuality;
        }
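        // Worked example (illustrative): with 200 test rows, if validate accumulates 8 (each miss
        // adds exactly 1 only when the true and predicted class labels differ by 1), then
        // testQuality = (1 - 8/200) * 100 = 96. Note the divisor is the full row count even though
        // the loop stops one row short of data.GetLength(0).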
Example #10
		/// <summary>
		/// Creates a new training epoch event.
		/// </summary>
		/// <param name="trainer">The trainer conducting the trainer</param>
		/// <param name="network">The network being trained</param>
		public TrainingEpochEvent(Trainer trainer, Network network) {
			this.trainer = trainer;
			this.network = network;
		}
Example #11
    public static NetworkPorts FromGenotype(NEAT.Genotype genotype)
    {
        var neuronGenes  = genotype.NeuronGenes.ToList();
        var synapseGenes = genotype.SynapseGenes.ToList();

        var network = new Neural.Network(20ul);

        var upperNeurons    = new Dictionary <int, IReceptiveField>();
        var lowerNeurons    = new Dictionary <int, IReceptiveField>();
        var positionNeurons = new Dictionary <int, IReceptiveField>();
        var speedNeurons    = new Dictionary <int, IReceptiveField>();

        foreach (var neuronGene in neuronGenes)
        {
            float a = NumberHelper.Scale(neuronGene.a, 0.02f, 0.1f);    // 0.1
            float b = NumberHelper.Scale(neuronGene.b, 0.2f, 0.25f);    // 0.2
            float c = NumberHelper.Scale(neuronGene.c, -65.0f, -50.0f); // -65.0
            float d = NumberHelper.Scale(neuronGene.d, 0.05f, 8.0f);    // 2.0

            try {
                var id = (int)network.AddNeuron(Neural.IzhikevichConfig.Of(a, b, c, d));

                var mean  = 0.0f;
                var sigma = 0.0f;
                switch (neuronGene.type)
                {
                case NeuronType.UpperNeuron:
                    mean             = NumberHelper.Scale(neuronGene.mean, -180.0f, 180.0f);
                    sigma            = NumberHelper.Scale(neuronGene.sigma, 0.0f, 180.0f);
                    upperNeurons[id] = new SignReceptiveField(mean, sigma);
                    break;

                case NeuronType.LowerNeuron:
                    mean             = NumberHelper.Scale(neuronGene.mean, -180.0f, 180.0f);
                    sigma            = NumberHelper.Scale(neuronGene.sigma, 0.0f, 180.0f);
                    lowerNeurons[id] = new SignReceptiveField(mean, sigma);
                    break;

                case NeuronType.PositionNeuron:
                    mean  = NumberHelper.Scale(neuronGene.mean, -12.0f, 12.0f);
                    sigma = NumberHelper.Scale(neuronGene.sigma, 0.0f, 12.0f);
                    positionNeurons[id] = new SignReceptiveField(mean, sigma);
                    break;

                case NeuronType.SpeedNeuron:
                    mean             = NumberHelper.Scale(neuronGene.mean, -1.0f, 1.0f);
                    sigma            = NumberHelper.Scale(neuronGene.sigma, 0.0f, 1000.0f);
                    speedNeurons[id] = new MulReceptiveField(mean, sigma);
                    break;
                }
            } catch (Exception e) {
                Debug.LogException(e);
            }
        }

        // Connect each input neuron to the output neuron.
        foreach (var synapseGene in synapseGenes)
        {
            if (!synapseGene.isEnabled)
            {
                continue;
            }

            var fromNeuronId = neuronGenes.FindIndex(n => n.InnovationId == synapseGene.fromNeuronId);
            var toNeuronId   = neuronGenes.FindIndex(n => n.InnovationId == synapseGene.toNeuronId);

            Assert.AreNotEqual(fromNeuronId, -1, "Must find from-neuron id");
            Assert.AreNotEqual(toNeuronId, -1, "Must find to-neuron id");

            float weight = NumberHelper.Scale(synapseGene.weight, -40.0f, 40.0f);

            try {
                network.AddSynapse((ulong)fromNeuronId, (ulong)toNeuronId, Neural.STDPConfig.Of(weight, -40.0f, 40.0f));
            } catch (Exception e) {
                Debug.LogException(e);
            }
        }

        return new NetworkPorts(network, upperNeurons, lowerNeurons, positionNeurons, speedNeurons);
    }
Example #12
		private void ConnectHiddenLayerToOutputs(Network network, int layerIdx, int size) 
		{
			for(int neuronIdx = 0; neuronIdx < size; neuronIdx++) 
			{
				String newNeuronName = "Hidden " + layerIdx + ", " + neuronIdx;
				foreach(string name in outputNeurons) {
					network.Connect(newNeuronName, name);
				}
			}
		}
Example #13
        private void GetNNStat(string type)
        {
            // For each subject's folder ...
            foreach (var rabbit in rabbits)
            {
                // Path.GetFileName(rabbit) returns the name of the rabbit's folder
                // ...//Батчаев Роберт -> Батчаев Роберт

                string nn_path = Path.Combine(rabbit, "сети");
                Network[] networks = new Network[0];
                if(type == "way")
                {
                    networks = new Network[4]{
                        Network.Load(Path.Combine(nn_path,"network-1.nn")),
                        Network.Load(Path.Combine(nn_path,"network-2.nn")),
                        Network.Load(Path.Combine(nn_path,"network-3.nn")),
                        Network.Load(Path.Combine(nn_path,"network-4.nn"))
                    };
                } else if(type == "angle")
                {
                    networks = new Network[4]{
                        Network.Load(Path.Combine(nn_path,"network-0.nn")),
                        Network.Load(Path.Combine(nn_path,"network-90.nn")),
                        Network.Load(Path.Combine(nn_path,"network-180.nn")),
                        Network.Load(Path.Combine(nn_path,"network-270.nn"))
                    };
                }

                Rabbit _rabbit = new Rabbit(networks);

                // Paths to the subjects' move-statistics files
                stats = Directory.GetFiles(Path.Combine(rabbit, "ходы"), "*.txt");
                Dictionary<string, NNWalkingStat> walks = new Dictionary<string, NNWalkingStat>();
                Dictionary<string, NNWalkingStat> walks_rage = new Dictionary<string, NNWalkingStat>();

                foreach (var stat in stats)
                {
                    walks.Add(     stat, _rabbit.GetSteps(stat, false));
                    walks_rage.Add(stat, _rabbit.GetSteps(stat, true));
                }

                NNWalkingStat totalWalk = NNWalkingStat.Sum(walks);
                NNWalkingStat totalWalkRage = NNWalkingStat.Sum(walks_rage);

                StringBuilder sb = new StringBuilder();
                string path = string.Empty;

                sb.AppendLine("totalErrors\tbonuses\tup\tright\tdown\tleft\ttotalGoods");
                sb.AppendLine(
                    totalWalk.totalErrors + "\t" +
                    totalWalk.totalBonuses + "\t" +
                    totalWalk.totalWays[0] + "\t" +
                    totalWalk.totalWays[1] + "\t" +
                    totalWalk.totalWays[2] + "\t" +
                    totalWalk.totalWays[3] + "\t" +
                    totalWalk.totalGoods
                );
                path = "\\[automated]ошибки-без-встряхивания.txt";
                System.IO.File.WriteAllText(rabbit + path, sb.ToString());

                sb.Clear();

                sb.AppendLine("totalErrors\tbonuses\tup\tright\tdown\tleft\ttotalGoods");
                sb.AppendLine(
                    totalWalkRage.totalErrors + "\t" +
                    totalWalkRage.totalBonuses + "\t" +
                    totalWalkRage.totalWays[0] + "\t" +
                    totalWalkRage.totalWays[1] + "\t" +
                    totalWalkRage.totalWays[2] + "\t" +
                    totalWalkRage.totalWays[3] + "\t" +
                    totalWalkRage.totalGoods
                );
                path = "\\[automated]ошибки-со-встряхиванием.txt";
                System.IO.File.WriteAllText(rabbit + path, sb.ToString());

            }//foreach (var rabbit in rabbits)
            MessageBox.Show("");
            Application.Exit();
        }
Example #14
		/// <summary>
		///  <p>
		/// Called to train the network.  Iterates until the terminating flag is
		/// set.  If there are no training event listeners set then there is a
		/// hard termination limit of 10000 (by default) iterations.
		/// </p>
		/// <p>
		/// The training algorithm is simple.  It presents the training examples
		/// in order, up to epochSize before ending the epoch.  At that point the
		/// error for the epoch is calculated and the EndEpochEvent is sent to all
		/// the listeners.  The weights of the network are then adjusted.  Training
		/// stops when the hard limit is reached (given no listeners) or by a listener
		/// requesting a termination.
		/// </p>
		/// </summary>
		/// <param name="network"></param>
		public void Train(Network network)
		{
			Dictionary<String, double> results;
			terminating = false;
			network.BeginTraining(this);
			epochCount = 0;
        
			if(errorManager.Count == 0) 
			{
				errorManager.AddCalculator("MSE", new MSEErrorCalculator(trainingExamples.Count));
				defaultErrorName = "MSE";
			}

			if(StartTrainingDelegates != null) {
				StartTrainingDelegates(new TrainingEpochEvent(this, network));
			}
			Random r = new Random(); // create once; re-seeding inside the loop can yield repeated values
			while(!terminating) 
			{
				try 
				{
					for(int i = 0; i < epochSize; i++) 
					{
						// Next's upper bound is exclusive, so pass Count so the last example can be drawn
						Example example = trainingExamples[r.Next(0, trainingExamples.Count)];
						results = network.Process(example.Inputs);
						errorManager.AccumulateError(example.Expected, results);
						network.SetFeedback(results, example.Expected);
						if(EndPresentationDelegates != null) {
							EndPresentationDelegates(new TrainingEpochEvent(this, network));
						}
					}
					epochCount++;

					if(EndEpochDelegates != null) {
						EndEpochDelegates(new TrainingEpochEvent(this, network));
					}

					UpdateNetwork(network);
					
					if(PostUpdateDelegates != null) {
						PostUpdateDelegates(new TrainingEpochEvent(this, network));
					}

					if((epochEventListeners.Count == 0) && (epochCount > hardMax)) 
					{
						terminating = true;
					}
				} catch(Exception e) 
				{
					Console.WriteLine(e.StackTrace);
					throw new TrainingException("Unable to train network due to exception", e);
				}
			}
			network.EndTraining();
		}
 //Counts the number of neurons in the given network
 public static int getNeuronsCount(Network network)
 {
     int i = 0;
     for (int layer = 0; layer < network.Layers.Length; layer++)
     {
         for (int neuron = 0; neuron < network.Layers[layer].Neurons.Length; neuron++)
         {
             i++;
         }
     }
     return i;
 }
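 // For reference (not in the original source): both counters can be written with LINQ, assuming
 // using System.Linq; and the same Layers/Neurons/Weights members used above.
 public static int getNeuronsCountLinq(Network network)
 {
     return network.Layers.Sum(layer => layer.Neurons.Length);
 }
 public static int getCountOfWeightsLinq(Network network)
 {
     return network.Layers.Sum(layer => layer.Neurons.Sum(n => n.Weights.Length));
 }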
Example #16
		private void CreateInputNeurons(Network network)  
		{
			foreach(string name in inputNeurons) 
			{
				network.AddInputNeuron(new Neuron(name));
			}
		}
Example #17
		/**
		* Build a network.  This method causes the network builder to build a fully
		* connected, feed-forward neural network.
		* @return A constructed network.
		*/    
		public virtual Network BuildNetwork() 
		{
			Network result = null;
        
			result = new Network();
			result.ActivationFactory = activationFactory;
			CreateInputNeurons(result);
			CreateOutputNeurons(result);
			CreateInternalNeurons(result);
        
			if(layerSizes.Count > 0) 
			{
				for(int i = 0; i < layerSizes.Count; i++) 
				{
					int size = (int)layerSizes[i];
                
					//
					// If this is the first hidden layer - connect it to the inputs.
					//
					if(i == 0) 
					{
						ConnectHiddenLayerToInputs(result, i, size);
					}
                
					//
					// If this is the last hidden layer - connect it to the outputs.
					// Note that if there is 1 hidden layer then we are done connecting
					// the layer.
					//
					if(i == (layerSizes.Count - 1)) 
					{
						ConnectHiddenLayerToOutputs(result, i, size);
					} 
                
					//
					// if we are not the first layer of hidden neurons, we need to
					// connect to the previous layer.  We only enter this if we have
					// more than one layer.
					//
					if((i != 0) && (i < layerSizes.Count)) 
					{
						ConnectInternalLayers(result, i, size, (int)layerSizes[i - 1]);
					}
				}
			}
        
			return result;
		}
Example #18
		private void ConnectInternalLayers(Network network, int layerIdx, 
			int layerSize, int priorSize) 
		{
			for(int currNeuronIdx = 0; currNeuronIdx < layerSize; currNeuronIdx++) 
			{
				for(int priorNeuronIdx = 0; priorNeuronIdx < priorSize; priorNeuronIdx++) 
				{
					String currNeuronName  = "Hidden " + layerIdx       + ", " + currNeuronIdx;
					String priorNeuronName = "Hidden " + (layerIdx - 1) + ", " + priorNeuronIdx;
					network.Connect(priorNeuronName, currNeuronName);
				}
			}
		}
Example #19
		private void CreateInternalNeurons(Network network) 
		{
			for(int sizeIdx = 0; sizeIdx < layerSizes.Count; sizeIdx++) 
			{
				int hiddenLayerSize = (int)layerSizes[sizeIdx];
				for(int neuronIdx = 0; neuronIdx < hiddenLayerSize; neuronIdx++) 
				{
					string newNeuronName = "Hidden " + sizeIdx + ", " + neuronIdx;
					network.AddInternalNeuron(new Neuron(newNeuronName));
				}
			}
		}
Example #20
		private void CreateOutputNeurons(Network network) 
		{
			foreach(string name in outputNeurons) 
			{
				network.AddOutputNeuron(new Neuron(name));
			}
		}
Example #21
		/// <summary>
		/// Evaluates the network, returning the evaluation of the error.
		/// </summary>
		/// <param name="examples">The examples to evaluate</param>
		/// <param name="network">The network to evalute</param>
		/// <returns>An error report for the valuation.</returns>
		public Dictionary<String, double> Evaluate(Network network, ExampleSet examples) 
		{
			ZeroErrors();
			for(int i = 0; i < examples.Count; i++) 
			{
				Example example = examples[i];
				Dictionary<String, double> results = network.Process(example.Inputs);
				CalculateErrors(example.Expected, results);
			}
			return errorValues;
		}
        private List<String> sortByLayers(List<String> weights, Network network)
        {
            List<String> returnList = new List<String>();
            Dictionary<int, double> totalSum = new Dictionary<int, double>();
            if (fromInputToOutputBox.Checked == true)
            {
                returnList = weights;
                returnList.Sort();

                if (sortSignFromToSortBox.Text == "-1")
                {
                    returnList.Reverse();
                }

            }
            else if (bySumWeightsBox.Checked == true)
            {

                for (int i = 0; i < network.Layers.Length; i++)
                {
                    double absSum = 0;
                    for (int j = 0; j < network.Layers[i].Neurons.Length; j++)
                    {
                        for (int k = 0; k < network.Layers[i].Neurons[j].Weights.Length; k++)
                        {
                            absSum += Math.Abs(network.Layers[i].Neurons[j].Weights[k]);
                        }
                    }
                    totalSum.Add(i, absSum);
                }
                if (sortSignAbsSumSortBox.Text == "1")
                {
                    totalSum = totalSum.OrderBy(pair => pair.Value).ToDictionary(pair => pair.Key, pair => pair.Value);
                }

                if (sortSignAbsSumSortBox.Text == "-1")
                {
                    totalSum = totalSum.OrderByDescending(pair => pair.Value).ToDictionary(pair => pair.Key, pair => pair.Value);
                }

                foreach(KeyValuePair<int, double> pair in totalSum)
                {
                    foreach(String weight in weights)
                    {
                        int layer = Int32.Parse(weight.Split(':')[0]);
                        if (layer == pair.Key)
                        {
                            returnList.Add(weight);
                        }
                    }
                }
            }

            return returnList;
        }
Example #23
		///
		/// Save the network to the pocket with the named error calculator.
		/// @param error The name of the error.
		/// @param network The network to save.
		/// @param trainer The trainer training the network.
		///
		public void SaveNetwork(string error, Network network, Trainer trainer) 
		{
			PocketManager manager = (PocketManager)managers[error];
			if(trainer.ErrorManager.GetError(error) < manager.GetLastUpdateError())  
			{
				manager.SaveNetwork(network, trainer);
			}
		}
        private List<string> sortByModuleAsc(List<String> weights, Network network)
        {
            List<String> returnList = new List<string>();
            Dictionary<string, double> topology = new Dictionary<string, double>();
            int layer, neuron, weight = 0;

            String[] split = new String[3];

            foreach(String key in weights)
            {
                split = key.Split(':');
                layer = Int32.Parse(split[0]);
                neuron = Int32.Parse(split[1]);
                weight = Int32.Parse(split[2]);

                topology.Add(key, Math.Abs(network.Layers[layer].Neurons[neuron].Weights[weight]));

            }

            topology = topology.OrderBy(pair => pair.Value).ToDictionary(pair => pair.Key, pair => pair.Value);

            foreach(KeyValuePair<string, double> pair in topology)
            {
                returnList.Add(pair.Key);
            }
            return returnList;
        }
Example #25
		/// <summary>
		/// Called at the end of an epoch.  In this case it sends the end epoch
		/// message to all the neuron trainers.
		/// </summary>
		/// <param name="network">The network being trained</param>
		public void UpdateNetwork(Network network) 
		{
				//errors.add(new Double(error));
			UpdateWeightAdjustments();
			network.AdjustWeights();
			foreach(NeuronTraining nt in neuronTrainers) {
				nt.EndEpoch();
			}
			errorManager.ClearErrors();
		}
Example #26
        private void buttonGetNNStat_Click()
        {
            Random r = new Random();

            // For each subject's folder ...
            foreach (var rabbit in rabbits)
            {
                // Store the paths to the subjects' move-statistics files
                stats = Directory.GetFiles(Path.Combine(rabbit, "ходы"), "*.txt");

                string nn_path = Path.Combine(rabbit, "сети");
                Network[] networks = new Network[4]{
                        Network.Load(Path.Combine(nn_path,"network-0.nn")),
                        Network.Load(Path.Combine(nn_path,"network-90.nn")),
                        Network.Load(Path.Combine(nn_path,"network-180.nn")),
                        Network.Load(Path.Combine(nn_path,"network-270.nn"))
                    };

                bool rageMode = false;
                for (int it = 0; it < 2; it++)
                {
                    int experimentIndex = 0;
                    Dictionary<int, int[]> _ways = new Dictionary<int, int[]>();
                    Dictionary<int, int> _errors = new Dictionary<int, int>();
                    Dictionary<int, int> _goods = new Dictionary<int, int>();
                    Dictionary<int, int> _bonuses = new Dictionary<int, int>();

                    #region _foreach_stat_
                    // For each statistics file ...
                    foreach (var stat in stats)
                    {
                        experimentIndex++;
                        _errors.Add(experimentIndex, 0);
                        _goods.Add(experimentIndex, 0);
                        _bonuses.Add(experimentIndex, 0);
                        _ways.Add(experimentIndex, new int[4]);
                        string[] strings = File.ReadAllLines(stat);
                        Dictionary<string, string> configs = new Dictionary<string, string>(strings.Length);

                        #region Parsing
                        int map_line = -1;
                        foreach (var s in strings)
                        {
                            map_line++;

                            // Found [map]: break out of the loop, only the map follows
                            if (s.StartsWith("[map]"))
                                break;

                            configs.Add(s.Split(':')[0], s.Split(':')[1]);
                        }
                        /*  map_ip:Карты\карта-15x15_19.txt
                            start:1,10
                            bonuses:10
                            up:13
                            right:17
                            down:7
                            left:5                          */
                        int start_x = Int32.Parse(configs["start"].Split(',')[0]);
                        int start_y = Int32.Parse(configs["start"].Split(',')[1]);

                        // Read the map that comes after [map]
                        string[] current_map = new string[15];
                        for (int i = ++map_line; i < strings.Length; i++)
                        {
                            current_map[i - map_line] = strings[i];
                        }
                        int[,] map = new int[15, 15];
                        for (int j = 0; j < 15; j++)
                        {
                            for (int i = 0; i < 15; i++)
                            {
                                map[i, j] = Int32.Parse(current_map[j][i].ToString());
                            }
                        }
                        #endregion

                        int x = start_x;
                        int y = start_y;
                        int gathered_bonuses = 0;
                        int[] ways = new int[4];
                        map[x, y] = 3;

                        int[,] memoryMap = new int[15, 15];
                        Common.Initialize(ref memoryMap, 8);

                        bool rage = false;
                        int rageValue = 4;
                        int rageWay = 0;
                        List<string> _rageWays = new List<string>();

                        for (int step = 0; step < 42; step++)
                        {
                            string[] vision = new string[25];
                            string[] memory = new string[15 * 15];

                            vision = Common.GenerateVision(x, y, map);

                            //int index = 0;
                            for (int j = y - 2; j <= y + 2; j++)
                            {
                                for (int i = x - 2; i <= x + 2; i++)
                                {
                                    if (i < 0 || i >= 15 || j < 0 || j >= 15)
                                    {
                                        continue;
                                    }
                                    else
                                    {
                                        memoryMap[i, j] = map[i, j];
                                        //memory[index] = map[i, j].ToString();
                                    }
                                }
                            }

                            memory = Common.GenerateMemory(memoryMap);
                            var tmp = new List<string>(memory);
                            tmp.AddRange(vision);
                            double[] double_tmp = tmp.Select(n => Double.Parse(n)).ToArray();

                            //string nnWay = Common.RecognisePath(networks, double_tmp);
                            string nnAngle = Common.RecognisePath(networks, double_tmp);
                            int practiceWayOfGOing = 0;
                            int theoryBestWayGoing = Common.GetBestDirection(memoryMap, x, y, "");
                            if (theoryBestWayGoing == -1)
                            {
                                continue;
                            }

                            var tmp2 = Common.ParseStringArrayVision(memory, 15);
                            var tmp3 = tmp2;

                            /*    1
                             * 4     2
                             * 	  3    */
                            switch (nnAngle)
                            {
                                case "0":
                                    practiceWayOfGOing = theoryBestWayGoing;
                                    break;
                                case "90":
                                    practiceWayOfGOing = theoryBestWayGoing == 4 ? 1 : ++theoryBestWayGoing;
                                    break;
                                case "180":
                                    switch (theoryBestWayGoing)
                                    {
                                        case 1:
                                        case 2:
                                            practiceWayOfGOing = theoryBestWayGoing + 2;
                                            break;
                                        case 3:
                                            practiceWayOfGOing = 1;
                                            break;
                                        case 4:
                                            practiceWayOfGOing = 2;
                                            break;
                                    }
                                    break;
                                case "270":// 1 + 270/90
                                    practiceWayOfGOing = theoryBestWayGoing == 1 ? 4 : --theoryBestWayGoing;
                                    break;
                            }

                            if (rageMode)
                            {
                                _rageWays.Add(practiceWayOfGOing.ToString());
                                if (Common.ChechRage(_rageWays) == true)
                                    rage = true;

                                //if (!rage)
                                //    practiceWayOfGOing = Int32.Parse(nnWay);
                                if (rage)
                                {
                                    //if (rageWay == 0)
                                    //    rageWay = r.Next(1, 5);
                                    practiceWayOfGOing = r.Next(1, 5);
                                    if (rageValue-- <= 0)
                                    {
                                        rage = false;
                                        rageValue = 4;
                                        rageWay = 0;
                                    }
                                }
                            }

                            ways[practiceWayOfGOing - 1]++;
                            if (practiceWayOfGOing != theoryBestWayGoing && _errors.ContainsKey(experimentIndex))
                                _errors[experimentIndex]++;
                            else
                                _goods[experimentIndex]++;

                            map[x, y] = 1;
                            switch (practiceWayOfGOing)
                            {
                                case 1:
                                    Map_staff.go_up(ref x, ref y, map);
                                    break;
                                case 2:
                                    Map_staff.go_right(ref x, ref y, map);
                                    break;
                                case 3:
                                    Map_staff.go_down(ref x, ref y, map);
                                    break;
                                case 4:
                                    Map_staff.go_left(ref x, ref y, map);
                                    break;
                                default:
                                    return;
                            }
                            if (map[x, y] == 2)
                            {
                                gathered_bonuses++;
                            }
                            map[x, y] = 3;

                        }/* for step 0 -> 42 */
                        _ways[experimentIndex] = ways;
                        _bonuses[experimentIndex] = gathered_bonuses;
                    }/* foreach stat in stats*/
                    #endregion

                    StringBuilder sb = new StringBuilder();
                    int totalErrors = 0;
                    foreach (var key in _errors.Keys)
                    {
                        totalErrors += _errors[key];
                    }
                    int totalGoods = 0;
                    foreach (var key in _goods.Keys)
                    {
                        totalGoods += _goods[key];
                    }
                    int totalBonuses = 0;
                    foreach (var key in _bonuses.Keys)
                    {
                        totalBonuses += _bonuses[key];
                    }
                    int[] totalWays = new int[4];
                    foreach (var key in _ways.Keys)
                    {
                        totalWays[0] += _ways[key][0];
                        totalWays[1] += _ways[key][1];
                        totalWays[2] += _ways[key][2];
                        totalWays[3] += _ways[key][3];
                    }

                    sb.AppendLine("totalErrors\tbonuses\tup\tright\tdown\tleft\ttotalGoods");
                    sb.AppendLine(totalErrors + "\t" + totalBonuses + "\t" + totalWays[0] + "\t" + totalWays[1] + "\t" + totalWays[2] + "\t" + totalWays[3] + "\t" + totalGoods);

                    string path = string.Empty;
                    if (rageMode)
                        path = "\\[automated]ошибки-со-встряхиванием.txt";
                    else
                        path = "\\[automated]ошибки-без-встряхивания.txt";
                    System.IO.File.WriteAllText(rabbit + path, sb.ToString());

                    rageMode = true;
                }// for 2
            }
            Application.Exit();
        }