Example #1
        public async Task Organizing()
        {
            var networkConfig  = new NetworkConfiguration(3, 1);
            var network        = new KohonenNetwork<Logistic>(networkConfig);
            var learningConfig = new LearningConfiguration
            {
                ThetaFactorPerEpoch = 0.95,
                OrganizingAlgorithm = new Organizing<Logistic>(network, 0.777)
            };
            var learning = new UnsupervisedLearning(network, learningConfig);

            var inputs = _getInputs();
            await learning.LearnAsync(inputs, 25);

            network.Input(_control[0]);
            var res0 = await network.GetOutputIndexAsync();

            network.Input(_control[1]);
            var res1 = await network.GetOutputIndexAsync();

            network.Input(_control[2]);
            var res2 = await network.GetOutputIndexAsync();

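            // Control samples 1 and 2 should map to the same output neuron; sample 0 should map to a different one.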
            Assert.NotEqual(res0, res1);
            Assert.Equal(res1, res2);
        }
Example #2
        internal bool Init(string sourceFolder, CountAnalytics ca)
        {
            object[] returned = DataHandler.ProcessInputTest(sourceFolder);
            if (returned == null)
            {
                System.Windows.MessageBox.Show("Invalid map folder");
                errorDuringImport = true;
                return false;
            }

            List<string> labels = (List<string>)returned[0];

            Cell[,] map = (Cell[,])returned[1];

            lh = new LabelingHandler(labels);
            kn = new KohonenNetwork(lh, map, this);

            Logo_Init();

            ScrollLeft_Init();
            ScrollRight_Init();

            countA = ca;

            return true;
        }
Example #3
        public void Learning()
        {
            var networkConfig  = new NetworkConfiguration(3, 5);
            var learningConfig = new LearningConfiguration
            {
                ThetaFactorPerEpoch  = 0.95,
                DefaultRepeatsNumber = 25
            };
            var network  = new KohonenNetwork<Logistic>(networkConfig);
            var learning = new UnsupervisedLearning(network, learningConfig);

            var inputs = _getInputs();

            learning.Learn(inputs);

            network.Input(_control[0]);
            var res0 = network.GetOutputIndex();

            network.Input(_control[1]);
            var res1 = network.GetOutputIndex();

            network.Input(_control[2]);
            var res2 = network.GetOutputIndex();

            Assert.NotEqual(res0, res1);
            Assert.Equal(res1, res2);
        }
Example #4
        public void Analyze(IForecastingDataSets datasets)
        {
            if (ModelStartRunning != null)
            {
                ModelStartRunning(this, new ComponentRunEventArgs(datasets));
            }
            int learningRadius = Math.Max(mSOMParameter.LayerWidth, mSOMParameter.LayerHeight) / 2;

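            // Build the SOM: a 1-D input layer matching the input vector length and a 2-D output lattice of LayerWidth x LayerHeight.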
            KohonenLayer inputLayer  = new KohonenLayer(datasets.InputData[0].Length);
            KohonenLayer outputLayer = new KohonenLayer(new Size(mSOMParameter.LayerWidth, mSOMParameter.LayerHeight),
                                                        mSOMParameter.NeighborhoodFunction, mSOMParameter.Topology);
            KohonenConnector connector = new KohonenConnector(inputLayer, outputLayer);

            connector.Initializer = new RandomFunction(0, 100);
            outputLayer.SetLearningRate(mSOMParameter.LearningRate, mSOMParameter.FinalLearningRate);
            outputLayer.IsRowCircular    = mSOMParameter.IsRowCircular;
            outputLayer.IsColumnCircular = mSOMParameter.IsColumnCircular;
            mNetwork = new KohonenNetwork(inputLayer, outputLayer);

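            // Forward each completed training epoch to subscribers via the ModelRunningEpoch event.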
            mNetwork.EndEpochEvent += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                if (ModelRunningEpoch != null)
                {
                    ModelRunningEpoch(this, new ComponentRunEpochEventArgs(args.TrainingIteration));
                }
            });
            mTrainingSet = ForecastingDataSets.ConvertToUnSupervisedTrainingSet(datasets);
            mNetwork.Learn(mTrainingSet, mSOMParameter.Iterations);

            if (ModelFinishRunning != null)
            {
                ModelFinishRunning(this, new ComponentRunEventArgs(datasets));
            }
        }
Example #5
        public void NodesTest()
        {
            var network0 = new KohonenNetwork<Gaussian>(5, 2, false);
            var network1 = new KohonenNetwork<Gaussian>(5, 2, true);

            Assert.Equal(5, network0.InputLayer.Nodes.Count());
            Assert.Equal(6, network1.InputLayer.Nodes.Count());
            Assert.Equal(2, network0.OutputLayer.Nodes.Count());
        }
Example #6
        public void LayersTest()
        {
            var network = new KohonenNetwork<Gaussian>(5, 2);

            Assert.Equal(1, network.Layers.Count());
            Assert.Throws<NotSupportedException>(() => network.Layers.Add(new Layer()));
            Assert.Equal(1, network.Layers.Count());
            Assert.True(network.Layers != network.Layers);
        }
Example #7
        public void KohonenNetworkIsUntested()
        {
            KohonenNetwork net = new KohonenNetwork(2, 2);

            for (int ep = 0; ep < 100; ep++)
            {
                net.Study(new int[] { 0, 0 }, 1, 1);
                net.Study(new int[] { 1, 1 }, 1, 1);
                net.Study(new int[] { 1, 0 }, 0, 1);
                net.Study(new int[] { 0, 1 }, 0, 1);
            }


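            // Classify the four input patterns; the results are not asserted.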
            var result = new int[]
            {
                net.Handle(0, 0),
                net.Handle(1, 1),
                net.Handle(0, 1),
                net.Handle(1, 0),
            };
        }
Example #8
        /// <summary>
        /// Unsupervised Learning Engine
        /// </summary>
        /// <param name="config">Path to the configuration file</param>
        static async Task Main(string config)
        {
            Configuration configuration = Configuration.FromYamlFile(config);

            switch (configuration.Network)
            {
            case "hopfield":
                //Parse network patterns
                var patterns = HopfieldUtils.ParsePatterns(configuration.Patterns, configuration.PatternRows, configuration.PatternColumns);
                //Initialize Hopfield network
                var network = new HopfieldNetwork(patterns);
                //Test network and get closest pattern
                Console.WriteLine($"Testing Pattern: {configuration.TestPattern}; Noise: {configuration.Noise}\n");
                var result = HopfieldUtils.Test(network, patterns, configuration.TestPattern, configuration.Noise);
                //Print network intermediate states
                PrintPatterns(result, configuration.PatternColumns);
                //Check if network got to test pattern successfully
                if (patterns.IndexOf(result[result.Count - 1]) == configuration.TestPattern)
                {
                    Console.WriteLine("Found pattern!\n");
                }
                else
                {
                    Console.WriteLine("Could not find pattern.\n");
                }

                if (configuration.Metrics == "all" || configuration.Metrics == "noise")
                {
                    //Get network noise accuracy metrics
                    await HopfieldUtils.SaveNoiseAccuracyMetrics(network, patterns, configuration.Repetitions, configuration.TestPattern);

                    Console.WriteLine($"Noise metrics stored in accuracy.csv [Repetitions: {configuration.Repetitions}]");
                }
                if (configuration.Metrics == "all" || configuration.Metrics == "energy")
                {
                    //Get network energy metrics
                    await HopfieldUtils.SaveEnergyMetrics(network, patterns, configuration.Repetitions, configuration.TestPattern, configuration.Noise);

                    Console.WriteLine("Energy metrics stored in energy_{i}.csv" + $" [Repetitions: {configuration.Repetitions}]");
                }
                break;

            case "oja":
                var networkOja = new OjaNetwork(configuration.LearningRate, configuration.Epochs, ParseCsv(configuration.Csv));
                var W          = networkOja.TrainOja();
                Console.WriteLine(W);
                break;

            case "kohonen":
                var values  = ParseCsv(configuration.Csv);
                var kohonen = new KohonenNetwork(values, configuration.KohonenK, configuration.WeightEntries);
                kohonen.Train(configuration.Epochs);
                var i         = 0;
                var countries = new string[] { "Austria", "Belgium", "Bulgaria", "Croatia", "Czech", "Denmark", "Estonia", "Finland", "Germany", "Greece", "Hungary", "Iceland", "Ireland", "Italy", "Latvia", "Lithuania", "Luxembourg", "Netherlands", "Norway", "Poland", "Portugal", "Slovakia", "Slovenia", "Spain", "Sweden", "Switzerland", "Ukraine", "United Kingdom" };
                var groups    = new List<(int x, int y)>();
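                // Classify every input row and print the winning map cell for the corresponding country.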
                foreach (var city in kohonen.values)
                {
                    (int x, int y)group = kohonen.Classify(city);
                    Console.WriteLine($"{countries[i++]}: {group.x},{group.y}");
                    groups.Add(group);
                }
                await File.WriteAllLinesAsync("classification.csv", groups.Select((v, index) => $"{v.x},{v.y}"));

                var weights = from Vector<double> weight in kohonen.W select weight;
                await File.WriteAllLinesAsync("weights.csv", weights.Select((v, index) => v.Aggregate("", (str, n) => str + n + ",")));

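                // For each output neuron, average the weight-space distance to its immediate lattice neighbours.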
                var distances = new List<double>();
                for (i = 0; i < kohonen.N; i++)
                {
                    for (var j = 0; j < kohonen.N; j++)
                    {
                        int imin = Math.Max(0, i - 1);
                        int imax = Math.Min(kohonen.N - 1, i + 1);
                        int jmin = Math.Max(0, j - 1);
                        int jmax = Math.Min(kohonen.N - 1, j + 1);

                        var neuronDist = new List<double>();
                        for (int x = imin; x <= imax; x++)
                        {
                            for (int y = jmin; y <= jmax; y++)
                            {
                                if ((i != x || j != y) && kohonen.Distance((i, j), (x, y)) <= 1)
                                {
                                    neuronDist.Add(kohonen.Distance(kohonen.W[i, j], kohonen.W[x, y]));
                                }
                            }
                        }
                        distances.Add(neuronDist.Average());
                    }
                }
                await File.WriteAllLinesAsync("distances.csv", distances.Select(v => v.ToString()));

                break;
            }
        }
Example #9
 /// <summary>
 /// Organizing algorithm for a Kohonen network.
 /// </summary>
 /// <param name="network">Network to organize</param>
 /// <param name="criticalRange">Critical range for deciding whether to start training or add a new neuron</param>
 /// <param name="maxOutputNeurons">Upper limit on the number of output neurons</param>
 public Organizing(KohonenNetwork<TFunc> network, double criticalRange, int maxOutputNeurons = int.MaxValue)
 {
     _network       = network;
     _criticalRange = criticalRange;
     _maxNeurons    = maxOutputNeurons;
 }
Example #10
        void Solve()
        {
            CrowNetP NetP = new CrowNetP();

            if (netUP.netType == "som")
            {
                #region self organizing maps

                #region prepare and assign
                trainingSet.Clear();
                int trainVectorDimension = 3;
                if (trainDataArePoints)
                {
                    for (int i = 0; i < pointsList.Count; i++)
                    {
                        trainingSet.Add(new TrainingSample(new double[] { pointsList[i].Value.X, pointsList[i].Value.Y, pointsList[i].Value.Z }));
                    }
                }
                else
                {
                    trainVectorDimension = trainingVectorTree.Branches[0].Count;
                    trainingSet          = new TrainingSet(trainVectorDimension);
                    for (int i = 0; i < trainingVectorTree.Branches.Count; i++)
                    {
                        double[] values = new double[trainVectorDimension];

                        for (int j = 0; j < trainVectorDimension; j++)
                        {
                            values[j] = trainingVectorTree.Branches[i][j].Value;
                        }

                        trainingSet.Add(new TrainingSample(values));
                    }
                }


                // process: start learning

                int learningRadius = Math.Max(layerWidth, layerHeight) / 2;

                INeighborhoodFunction neighborhoodFunction = new GaussianFunction(learningRadius, netUP.neighborDistance) as INeighborhoodFunction;
                if (neighborhood)
                {
                    neighborhoodFunction = new MexicanHatFunction(learningRadius) as INeighborhoodFunction;
                }

                LatticeTopology topology = LatticeTopology.Rectangular;
                if (latticeTopology)
                {
                    topology = LatticeTopology.Hexagonal;
                }

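                // Assemble the SOM: an input layer sized to the training vectors, a 2-D output lattice, and a fully connected Kohonen connector with random initial weights.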
                KohonenLayer     inputLayer  = new KohonenLayer(trainVectorDimension);
                KohonenLayer     outputLayer = new KohonenLayer(new Size(layerWidth, layerHeight), neighborhoodFunction, topology);
                KohonenConnector connector   = new KohonenConnector(inputLayer, outputLayer);
                connector.Initializer = randomizer;

                outputLayer.SetLearningRate(learningRate, 0.05d);
                outputLayer.IsRowCircular    = isCircularRows;
                outputLayer.IsColumnCircular = isCircularColumns;
                network = new KohonenNetwork(inputLayer, outputLayer);
                network.useRandomTrainingOrder = opt.UseRandomTraining;
                #endregion

                #region delegates
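                // At the start of each epoch, copy the current node weights into the preview grids and the data tree.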
                network.BeginEpochEvent += new TrainingEpochEventHandler(
                    delegate(object senderNetwork, TrainingEpochEventArgs args)
                {
                    #region TrainingCycle
                    if (network == null || !GO)
                    {
                        return;
                    }


                    int iPrev     = layerWidth - 1;
                    allValuesTree = new GH_Structure<GH_Number>();
                    for (int i = 0; i < layerWidth; i++)
                    {
                        for (int j = 0; j < layerHeight; j++)
                        {
                            IList<ISynapse> synapses = (network.OutputLayer as KohonenLayer)[i, j].SourceSynapses;
                            double x = synapses[0].Weight;
                            double y = synapses[1].Weight;
                            double z = synapses[2].Weight;

                            for (int k = 0; k < trainVectorDimension; k++)
                            {
                                allValuesTree.Append(new GH_Number(synapses[k].Weight), new GH_Path(i, j));
                            }

                            rowX[j][i]    = x;
                            rowY[j][i]    = y;
                            rowZ[j][i]    = z;
                            columnX[i][j] = x;
                            columnY[i][j] = y;
                            columnZ[i][j] = z;

                            if (j % 2 == 1)
                            {
                                hexagonalX[i][j] = x;
                                hexagonalY[i][j] = y;
                                hexagonalZ[i][j] = z;
                            }
                            else
                            {
                                hexagonalX[iPrev][j] = x;
                                hexagonalY[iPrev][j] = y;
                                hexagonalZ[iPrev][j] = z;
                            }
                        }
                        iPrev = i;
                    }

                    if (isCircularRows)
                    {
                        for (int i = 0; i < layerHeight; i++)
                        {
                            rowX[i][layerWidth] = rowX[i][0];
                            rowY[i][layerWidth] = rowY[i][0];
                            rowZ[i][layerWidth] = rowZ[i][0];
                        }
                    }

                    if (isCircularColumns)
                    {
                        for (int i = 0; i < layerWidth; i++)
                        {
                            columnX[i][layerHeight]    = columnX[i][0];
                            columnY[i][layerHeight]    = columnY[i][0];
                            columnZ[i][layerHeight]    = columnZ[i][0];
                            hexagonalX[i][layerHeight] = hexagonalX[i][0];
                            hexagonalY[i][layerHeight] = hexagonalY[i][0];
                            hexagonalZ[i][layerHeight] = hexagonalZ[i][0];
                        }
                    }

                    Array.Clear(isWinner, 0, layerHeight * layerWidth);

                    #endregion
                    NetP = new CrowNetP("som", layerWidth, layerHeight, isCircularRows, isCircularColumns, latticeTopology, neighborhood, isWinner, rowX, rowY, rowZ, columnX, columnY, columnZ, hexagonalX, hexagonalY, hexagonalZ, allValuesTree);
                    counter++;
                });

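                // After each training sample, flag the winning neuron on the map.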
                network.EndSampleEvent += new TrainingSampleEventHandler(
                    delegate(object senderNetwork, TrainingSampleEventArgs args)
                {
                    isWinner[network.Winner.Coordinate.X, network.Winner.Coordinate.Y] = true;
                });
                #endregion

                #endregion
            }

            network.Learn(trainingSet, cycles);
        }