Example #1
        public void to_coordinates()
        {
            // Map dimensions: n rows by m columns.
            ushort n = 4;
            ushort m = 5;

            // Flattened index to convert back into (x, y) grid coordinates.
            var index = 14;

            // Expected coordinates, found by walking the grid in row-major order.
            ushort x = 0;
            ushort y = 0;
            var k = 0;

            for (int i = 0; i < n; i++)
            {
                for (int j = 0; j < m; j++)
                {
                    if (k == index)
                    {
                        x = (ushort)i;
                        y = (ushort)j;
                    }
                    k++;
                }
            }

            var som         = new SelfOrganizingMap(1, new ushort[] { n, m }, 1, 1);
            var coordinates = som.ToCoordinates(index);
        }
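For a row-major grid of n rows and m columns, the nested loop above is simply searching for index / m and index % m. A minimal sketch of the same conversion in closed form (the helper name FlattenedIndexToCoordinates is illustrative and not part of the library):

        // Illustrative helper: converts a flattened row-major index into (row, column).
        private static (ushort X, ushort Y) FlattenedIndexToCoordinates(int index, ushort m)
        {
            var x = (ushort)(index / m); // row
            var y = (ushort)(index % m); // column
            return (x, y);
        }

        // For index = 14 and m = 5 this yields (2, 4), matching the nested-loop search above.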
Example #2
        public void dense_features_simple()
        {
            var instance1 = new[] { 20f, 30f, 0f, 0f };
            var instance2 = new[] { 20f, 30f, 0f, 0f };
            var instance3 = new[] { 0f, 0f, 10f, 20f };
            var instance4 = new[] { 0f, 0f, 10f, 20f };

            var inputFeaturesTypes = new InputFeatureTypes[] {
                InputFeatureTypes.Ordinal,
                InputFeatureTypes.Ordinal,
                InputFeatureTypes.Ordinal,
                InputFeatureTypes.Ordinal
            };

            var ordinalDenseSet = new InstanceRepresentation(inputFeaturesTypes, sparse: false);

            ordinalDenseSet.AddInstance(instance1);
            ordinalDenseSet.AddInstance(instance2);
            ordinalDenseSet.AddInstance(instance3);
            ordinalDenseSet.AddInstance(instance4);

            var som = new SelfOrganizingMap(10, new ushort[] { 20, 20 }, 100, 3);

            var categories = som.Train(ordinalDenseSet);
        }
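Because instances 1/2 and 3/4 are identical pairs, a natural follow-up check is that each pair lands in the same category. This is a sketch only, not part of the original test, and it assumes Train returns one category index per instance in insertion order:

            // Identical instances should map to the same SOM category.
            Assert.True(categories[0] == categories[1]);
            Assert.True(categories[2] == categories[3]);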
Example #3
        /// <summary>
        ///     Calculate the output of the SOM, for each output neuron.  Typically,
        ///     you will use the classify method instead of calling this method.
        /// </summary>
        /// <param name="som">The SOM whose output is computed.</param>
        /// <param name="input">The input pattern.</param>
        /// <returns>The output activation of each output neuron.</returns>
        private double[] Compute(SelfOrganizingMap som, double[] input)
        {
            var result = new double[som.OutputCount];

            var matrixRows = som.Weights.ToRowArrays();

            for (var i = 0; i < som.OutputCount; i++)
            {
                var optr = matrixRows[i];

                // Copy the input pattern into a 1 x N row matrix.
                Matrix matrixA = DenseMatrix.Create(1, input.Length, 0);
                for (var j = 0; j < input.Length; j++)
                {
                    matrixA[0, j] = input[j];
                }

                // Copy the weights of output neuron i into a 1 x N row matrix.
                Matrix matrixB = DenseMatrix.Create(1, input.Length, 0);
                for (var j = 0; j < optr.Length; j++)
                {
                    matrixB[0, j] = optr[j];
                }

                result[i] = VectorAlgebra.DotProduct(matrixA.ToRowArrays()[0], matrixB.ToRowArrays()[0]);
            }

            return(result);
        }
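The summary notes that callers normally go through a classify method rather than calling Compute directly. One plausible wrapper, shown only as a sketch (the Classify name and behavior here are illustrative, not taken from the library), picks the output neuron with the largest activation:

        // Illustrative only: returns the index of the output neuron with the highest activation.
        private int Classify(SelfOrganizingMap som, double[] input)
        {
            var output = Compute(som, input);
            var winner = 0;
            for (var i = 1; i < output.Length; i++)
            {
                if (output[i] > output[winner])
                {
                    winner = i;
                }
            }
            return winner;
        }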
Example #4
        private void SomCreate_Click(object sender, RoutedEventArgs eventArgs)
        {
            try
            {
                int neuronCount = Int32.Parse(SomNeuronCount.Text);
                NumberOfNeurons = neuronCount;
                int    dimensions        = Int32.Parse(SomDimensions.Text);
                string choice            = SomDistance.Text;
                IDistanceCalculator dist = new EuclideanDistance();
                if (choice == ManhattanChoice)
                {
                    dist = new ManhattanDistance();
                }
                double minWeights = double.Parse(SomWeightsMin.Text, CultureInfo.InvariantCulture);
                double maxWeights = double.Parse(SomWeightsMax.Text, CultureInfo.InvariantCulture);

                if (AlgorithmSelectBox.Text == SomChoice)
                {
                    Network = new SelfOrganizingMap(neuronCount, new NeuronRandomRectangularInitializer(minWeights, maxWeights, dimensions), dist);
                }
                else
                {
                    Network = new KMeansNetwork(neuronCount, new NeuronRandomRectangularInitializer(minWeights, maxWeights, dimensions), dist);
                }

                SomInformation.Text =
                    $"Successfully created {AlgorithmSelectBox.Text}\n NC: {neuronCount}; D: {choice}; Min.: {minWeights}; Max.: {maxWeights}";
            }
            catch (Exception e)
            {
                MessageBox.Show($"Something went wrong while parsing. Original message: {e.Message}");
            }
        }
Example #5
        private void doSOMThreaded()
        {
            int    numRuns      = (int)numericUpDown4.Value;
            int    numClust     = (int)numericUpDown3.Value;
            int    numEpochs    = (int)numericUpDown5.Value;
            double learningRate = (double)numericUpDown6.Value;

            String folder = pointSetFile.Substring(0, pointSetFile.LastIndexOf('\\'));

            openFileDialog2.InitialDirectory = folder;

            PointSet somPoints = new PointSet(pointSetFile);

            List <int> dimensions = new List <int>();

            dimensions.Add(numClust); // 1 dimensional "String"
            for (int i = 0; i < numRuns; i++)
            {
                SelfOrganizingMap algo = new SelfOrganizingMap(somPoints, dimensions, numEpochs, learningRate);
                //while (!algo.doneExecuting())
                //    algo.Epoch();
                algo.runLargeEpochs(0.5, 2);
                algo.runLargeEpochs(0.15, 1);
                algo.runLargeEpochs(0.05, 3);

                String newCluster = folder + "/" + pointSetFileShort.Substring(0, pointSetFileShort.LastIndexOf('.')) + "_som" + numClust + "_" + i;
                algo.GetClusterLazy().SavePartition(newCluster, pointSetFile.Substring(pointSetFile.LastIndexOf("\\") + 1), "");
            }
            MessageBox.Show("Complete!");
        }
Example #6
        private SelfOrganizingMap CreateNetwork()
        {
            var result = new SelfOrganizingMap(3, WIDTH * HEIGHT);

            result.Reset();
            return(result);
        }
Example #7
        private void btnBeginTraining_Click(object sender, EventArgs e)
        {
            int inputCount  = OCRForm.DOWNSAMPLE_HEIGHT * OCRForm.DOWNSAMPLE_WIDTH;
            int letterCount = this.letters.Items.Count;

            this.trainingSet = new double[letterCount][];
            int index = 0;

            foreach (char ch in this.letterData.Keys)
            {
                this.trainingSet[index] = new double[inputCount];
                bool[] data = this.letterData[ch];
                for (int i = 0; i < inputCount; i++)
                {
                    this.trainingSet[index][i] = data[i] ? 0.5 : -0.5;
                }
                index++;
            }

            network = new SelfOrganizingMap(inputCount, letterCount, NormalizationType.Z_AXIS);

            this.ThreadProc();

            //ThreadStart ts = new ThreadStart(ThreadProc);
            //Thread thread = new Thread(ts);
            //thread.Start();
        }
Example #8
        public void sparse_features_medium()
        {
            // Cluster generator settings
            var maxRadius    = 100;
            var minRadius    = 10;
            var clusterCount = 10;

            // Data size
            var featureDim = 100;
            var obsCount   = 2000;

            var inputFeaturesTypes = new InputFeatureTypes[featureDim];

            for (var i = 0; i < featureDim; i++)
            {
                inputFeaturesTypes[i] = InputFeatureTypes.Ordinal;
            }

            var sparseOrdinalSet = new InstanceRepresentation(inputFeaturesTypes, sparse: true);

            var clusterGenerator  = new SyntheticDataGenerator(maxRadius, minRadius, obsCount, clusterCount, featureDim);
            var trueClusterLabels = new List <int>();

            using (var obsGetter = clusterGenerator.GenerateClusterObservations().GetEnumerator())
            {
                var isNextObservation = obsGetter.MoveNext();

                while (isNextObservation)
                {
                    var obs     = obsGetter.Current.Item2;
                    var cluster = obsGetter.Current.Item1;
                    trueClusterLabels.Add(cluster);
                    sparseOrdinalSet.AddInstance(obs);

                    isNextObservation = obsGetter.MoveNext();
                }
            }

            var som        = new SelfOrganizingMap(10, new ushort[] { (ushort)(clusterCount + 5), (ushort)(clusterCount + 5) }, 1000, 3);
            var categories = som.Train(sparseOrdinalSet);

            var metricsGenerator = new MetricsGenerator();

            metricsGenerator.Add(Metrics.Purity);
            for (var i = 0; i < categories.Length; i++)
            {
                metricsGenerator.AddResult(categories[i], trueClusterLabels[i]);
                metricsGenerator.UpdateMetrics();
            }

            var purity = metricsGenerator.GetMetric(Metrics.Purity);

            // The clustering should reach a purity of at least 0.6 (within tolerance).
            Assert.True(purity > 0.6 - Epsilon);
        }
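The assertion above gates on cluster purity against the generated labels. As a reference for what that metric means: each predicted category is credited with its most frequent true label, and purity is the fraction of observations covered by those majority labels. A minimal sketch of that computation (independent of the MetricsGenerator API, which is assumed to implement the same idea; requires System.Linq):

        // Illustrative purity computation: fraction of observations whose true label
        // matches the majority true label of their predicted category.
        private static double ComputePurity(IReadOnlyList<int> predicted, IReadOnlyList<int> truth)
        {
            var counts = new Dictionary<int, Dictionary<int, int>>();
            for (var i = 0; i < predicted.Count; i++)
            {
                if (!counts.TryGetValue(predicted[i], out var perLabel))
                {
                    counts[predicted[i]] = perLabel = new Dictionary<int, int>();
                }
                perLabel[truth[i]] = perLabel.TryGetValue(truth[i], out var c) ? c + 1 : 1;
            }

            var majorityTotal = counts.Values.Sum(perLabel => perLabel.Values.Max());
            return (double)majorityTotal / predicted.Count;
        }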
Example #9
        private void InitializeSom()
        {
            var startedRange = new CoordRange(200, 300, 250, 350);
            var nodes        = _randomizer.RandomizeStartedNodesInTriangle(_neuronsCount, startedRange, _areaSize);
            var points       = Translator.TranslateNodesToPoints(nodes);

            _drawer.DrawPoints(points, _pointsDiameter, _pointsColor, PointType.Filled);
            _drawer.DrawLines(points, _lineWidth, _linesColor);

            _som = new SelfOrganizingMap(nodes);
        }
Example #10
        public static void Main()
        {
            //var data = new string[]
            //{
            //    "Data/2010.txt",
            //    "Data/2011.txt",
            //    "Data/2012.txt",
            //    "Data/2013.txt",
            //    "Data/2014.txt",
            //    "Data/2015.txt",
            //    "Data/2016.txt",
            //    "Data/2017.txt",
            //    "Data/2018.txt"
            //};

            var settings = Settings1.Default;

            Menu(settings);

            //var epochData = new List<double[]>(data.Length);
            //for (var i = 0; i < data.Length - 1; i++)
            //    epochData.Add(Convert(File.ReadAllLines(data[i])).ToArray());

            var epochData = ConvertIris(File.ReadAllLines("Data/iris.data"), out var checkIris);

            var network = new SelfOrganizingMap(epochData[0].Length, settings.NeruonsCount);
            var teacher = new SelfOrganizingMapTeacher(network);

            network.Randomize();
            var     res        = 10d;
            var     maxres     = double.MaxValue;
            Network minNetwork = null;
            var     iterations = settings.IterationsCount;

            while (res > 1d && teacher.Iteration < iterations)
            {
                res = teacher.RunEpoch(epochData.ToArray(), null);
                if (res < maxres)
                {
                    maxres     = res;
                    minNetwork = network.Copy();
                }
                Console.WriteLine(res);
            }

            //epochData.Add(Convert(File.ReadLines(data[data.Length - 1])).ToArray());

            Console.WriteLine($"min value: {maxres}");
            using (var stream = new FileStream("image.png", FileMode.Create))
                Draw(minNetwork[0].Neurons, checkIris.ToArray()).Save(stream, System.Drawing.Imaging.ImageFormat.Png);

            System.Diagnostics.Process.Start("image.png");
        }
Example #11
        public void ShouldCalculateEuclidianDistance()
        {
            var    decimals        = 2;
            double realDistance    = 1.105;
            var    abstractNetwork = new SelfOrganizingMap();

            abstractNetwork.InputAttributes.Add(new InputAttributeBase {
                InputAttributeNumber = 0
            });
            abstractNetwork.InputAttributes.Add(new InputAttributeBase {
                InputAttributeNumber = 1
            });
            abstractNetwork.InputAttributes.Add(new InputAttributeBase {
                InputAttributeNumber = 2
            });

            abstractNetwork.Weights.Add(new WeightBase {
                InputAttributeNumber = 0, NeuronNumber = 0, Value = 0.1
            });
            abstractNetwork.Weights.Add(new WeightBase {
                InputAttributeNumber = 1, NeuronNumber = 0, Value = 0.4
            });
            abstractNetwork.Weights.Add(new WeightBase {
                InputAttributeNumber = 2, NeuronNumber = 0, Value = 0.5
            });

            abstractNetwork.Neurons.Add(new NeuronBase {
                NeuronNumber = 0
            });

            var inputVector = new List <InputAttributeValue>
            {
                new InputAttributeValue {
                    InputAttributeNumber = 0, Value = 1
                },
                new InputAttributeValue {
                    InputAttributeNumber = 1, Value = 0
                },
                new InputAttributeValue {
                    InputAttributeNumber = 2, Value = 0
                },
            };

            double distance = abstractNetwork.GetEuclideanDistance(abstractNetwork.Neurons[0], inputVector);

            Assert.AreEqual(Math.Round(realDistance, decimals), Math.Round(distance, decimals));
        }
Example #12
        void ThreadProc()
        {
            Random rand = new Random();

            // build the training set
            this.input = new double[SAMPLE_COUNT][];

            for (int i = 0; i < SAMPLE_COUNT; i++)
            {
                this.input[i] = new double[INPUT_COUNT];
                for (int j = 0; j < INPUT_COUNT; j++)
                {
                    this.input[i][j] = rand.NextDouble();
                }
            }

            // build and train the neural network
            this.net = new SelfOrganizingMap(INPUT_COUNT, OUTPUT_COUNT,
                                             NormalizationType.MULTIPLICATIVE);
            TrainSelfOrganizingMap train = new TrainSelfOrganizingMap(
                this.net, this.input, TrainSelfOrganizingMap.LearningMethod.SUBTRACTIVE, 0.5);

            train.Initialize();
            double lastError  = Double.MaxValue;
            int    errorCount = 0;

            while (errorCount < 10)
            {
                train.Iteration();
                this.retry++;
                this.totalError = train.TotalError;
                this.bestError  = train.BestError;
                this.Invalidate();

                if (this.bestError < lastError)
                {
                    lastError  = this.bestError;
                    errorCount = 0;
                }
                else
                {
                    errorCount++;
                }
            }
        }
Example #13
        /// <summary>
        ///     Create an instance of competitive training.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="learningRate">The learning rate, how much to apply per iteration.</param>
        /// <param name="training">The training set (unsupervised).</param>
        /// <param name="neighborhood">The neighborhood function to use.</param>
        public BasicTrainSOM(SelfOrganizingMap network, double learningRate,
                             IList <BasicData> training, INeighborhoodFunction neighborhood)
        {
            _neighborhood      = neighborhood;
            _training          = training;
            LearningRate       = learningRate;
            _network           = network;
            _inputNeuronCount  = network.InputCount;
            _outputNeuronCount = network.OutputCount;
            ForceWinner        = false;
            _error             = 0;

            // setup the correction matrix
            _correctionMatrix = DenseMatrix.Create(_outputNeuronCount, _inputNeuronCount, 0);

            // create the BMU class
            _bmuUtil = new BestMatchingUnit(network);
        }
Example #14
        public SOMColors()
        {
            InitializeComponent();

            network  = CreateNetwork();
            gaussian = new NeighborhoodRBF(RBFEnum.Gaussian, WIDTH, HEIGHT);
            train    = new BasicTrainSOM(network, 0.01, null, gaussian);

            train.ForceWinner = false;
            samples           = AIFH.Alloc2D <double>(15, 3);

            for (int i = 0; i < 15; i++)
            {
                samples[i][0] = rnd.NextDouble(-1, 1);
                samples[i][1] = rnd.NextDouble(-1, 1);
                samples[i][2] = rnd.NextDouble(-1, 1);
            }

            // Decay the learning rate and neighborhood radius automatically over the run
            // (parameter order assumed: planned iterations, start/end learning rate, start/end radius).
            train.SetAutoDecay(100, 0.8, 0.003, 30, 5);
        }