Example #1
        public void FeedForwardNeuralNetwork_LoadModelSparseMatrix()
        {
            var target = new FeedForwardNeuralNetwork<double>(
                new[] { 2L, 3L, 2L });

            var parser = new DoubleParser<string>();
            var matrix = TestsHelper.ReadMatrix(
                5,
                5,
                "[[-1.0, 1.0, 0.5, 0, 0], [1.0, -1.0, 0.5, 0, 0], [0, 0, 0, -1.0, 2.0], [0, 0, 0, 0.5, -1.5], [0, 0, 0, 1.0, -0.5]]",
                (i, j) => new SparseDictionaryMatrix<double>(i, j, 0),
                parser,
                true);
            var vector = TestsHelper.ReadVector(
                5,
                "[-1.0, 0.0, 1.0, -0.5, 0.5]",
                new SparseDictionaryMathVectorFactory<double>(),
                parser,
                true);
            var model = new NeuralNetworkModel<double, SparseDictionaryMatrix<double>, IMathVector<double>>(
                matrix,
                vector);

            target.LoadModelSparse<SparseDictionaryMatrix<double>, ILongSparseMatrixLine<double>, IMathVector<double>>(
                model);

            this.AssertTargetFromMatrix(
                model,
                target);
        }
Example #2
        public void Initialize()
        {
            IForecastingModel model = null;

            Models = new List<IForecastingModel>();

            AnnModelParameter annPara = new AnnModelParameter();

            model = new NeuralNetworkModel(annPara);
            Models.Add(model);

            Heiflow.AI.SVM.Parameter p = new Heiflow.AI.SVM.Parameter();
            model = new SVMModel(p);
            Models.Add(model);

            ModelParameter mp = new ModelParameter();

            model = new MLRModel(mp);
            Models.Add(model);

            Recognizers = new List<IRecognizer>();

            foreach (var mm in Models)
            {
                var recognizer = new ImageRecognizer(mm, _IImageSetsBuilder, _IColorClassification);
                Recognizers.Add(recognizer);
            }
        }
Example #3
        public static void ToTreeView(TreeView t, NeuralNetworkModel nn)
        {
            t.Nodes.Clear();

            TreeNode root = new TreeNode("NeuralNetwork");

            nn.Layers.ForEach((layer) =>
            {
                TreeNode lnode = new TreeNode("Layer");

                layer.Neurons.ForEach((neuron) =>
                {
                    TreeNode nnode = new TreeNode("Neuron");
                    nnode.Nodes.Add("Bias: " + neuron.Bias.ToString());
                    nnode.Nodes.Add("Delta: " + neuron.Delta.ToString());
                    nnode.Nodes.Add("Value: " + neuron.Value.ToString());

                    neuron.Dendrites.ForEach((dendrite) =>
                    {
                        TreeNode dnode = new TreeNode("Dendrite");
                        dnode.Nodes.Add("Weight: " + dendrite.Weight.ToString());

                        nnode.Nodes.Add(dnode);
                    });

                    lnode.Nodes.Add(nnode);
                });

                root.Nodes.Add(lnode);
            });

            //root.ExpandAll();
            t.Nodes.Add(root);
        }
Example #4
 public NeuralNetworkFacade(double learningrate, int[] layers, char[] chardict)
 {
     this.neuralNet        = new NeuralNetworkModel(learningrate, layers);
     this.validationReport = new ValidationReport();
     this.chardict         = chardict;
     this.inputnodes       = layers[0];
 }
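
The constructor above maps its arguments directly onto fields: the learning rate and layer sizes feed the underlying NeuralNetworkModel, and layers[0] fixes the number of input nodes. A minimal, hedged usage sketch follows; the variable name and all concrete values are illustrative assumptions, not taken from the original project.

    // Hypothetical usage sketch for the constructor above.
    // layers[0] becomes the number of input nodes, as in the constructor body.
    var facade = new NeuralNetworkFacade(
        learningrate: 0.05,
        layers: new[] { 64, 32, 26 },
        chardict: "abcdefghijklmnopqrstuvwxyz".ToCharArray());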
Example #5
        /// <summary>
        /// Searches for anomalies using the neural network model.
        /// </summary>
        /// <returns>A list (possibly empty) of anomaly messages.</returns>
        public List<string> SearchAnomaliesByNeuralNetwork()
        {
            var messages = new List<string>();

            _neuralNetworkResult = new List<double>();
            for (var i = 0; i < Samples.Count; i++)
            {
                var output = NeuralNetworkModel.Calculate(Samples[i])[0];
                _neuralNetworkResult.Add(output);
                if (output > 0.5)
                {
                    var probability = (output + 1) / 2;
                    messages.Add($"№{i + 1}: обнаружена аномалия с вероятностью {probability}.");
                }
            }

            return messages;
        }
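
As shown below, a caller can run the detector and surface any returned messages; analyzer is a placeholder for an instance of the containing class, which this example does not show.

    // Hypothetical usage sketch: run the detector and print any anomaly messages.
    // "analyzer" is an assumed instance of the class that declares the method above.
    var anomalies = analyzer.SearchAnomaliesByNeuralNetwork();
    foreach (var message in anomalies)
    {
        Console.WriteLine(message);
    }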
Example #6
        public static void ToPictureBox(PictureBox p, NeuralNetworkModel nn, int X, int Y)
        {
            int neuronWidth    = 30;
            int neuronDistance = 50;
            int layerDistance  = 50;
            int fontSize       = 8;

            Bitmap   b = new Bitmap(p.Width, p.Height);
            Graphics g = Graphics.FromImage(b);

            g.FillRectangle(Brushes.White, g.ClipBounds);

            int y = Y;

            for (int l = 0; l < nn.Layers.Count; l++)
            {
                Layer layer = nn.Layers[l];

                int x = X - (neuronDistance * (layer.Neurons.Count / 2));

                for (int n = 0; n < layer.Neurons.Count; n++)
                {
                    Neuron neuron = layer.Neurons[n];

                    for (int d = 0; d < neuron.Dendrites.Count; d++)
                    {
                        // TO DO: optionally draw dendrites between neurons
                    }

                    g.FillEllipse(Brushes.WhiteSmoke, x, y, neuronWidth, neuronWidth);
                    g.DrawEllipse(Pens.Gray, x, y, neuronWidth, neuronWidth);
                    g.DrawString(neuron.Value.ToString("0.00"), new Font("Arial", fontSize), Brushes.Black, x + 2, y + (neuronWidth / 2) - 5);

                    x += neuronDistance;
                }

                y += layerDistance;
            }

            p.Image = b;
        }
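
In the drawing loop above, X serves as the horizontal anchor around which each layer is centered and Y as the top offset of the first layer. A hedged usage sketch follows; NetworkDrawing, pictureBox1 and model are placeholder names, since the containing type is not shown in this example.

    // Hypothetical usage sketch: render the network roughly centered in the control.
    // "NetworkDrawing", "pictureBox1" and "model" are placeholder names.
    NetworkDrawing.ToPictureBox(pictureBox1, model, pictureBox1.Width / 2, 20);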
Example #7
        /// <summary>
        /// Verifies that the model matches what is stored in the network.
        /// </summary>
        /// <typeparam name="C">The type of the objects that make up the coefficients.</typeparam>
        /// <typeparam name="M">The type of the objects that make up the matrices.</typeparam>
        /// <typeparam name="V">The type of the objects that make up the vectors.</typeparam>
        /// <param name="expected">The model with the expected values.</param>
        /// <param name="actual">The network containing the values to compare.</param>
        private void AssertTargetFromMatrix<C, M, V>(
            NeuralNetworkModel<C, M, V> expected,
            FeedForwardNeuralNetwork<C> actual)
            where M : ILongMatrix<C>
            where V : IVector<C>
        {
            var actualTresholds   = actual.InternalTresholds;
            var expectedTresholds = expected.Tresholds;

            Assert.AreEqual(expectedTresholds.LongLength, actualTresholds.LongLength);
            for (var i = 0; i < actualTresholds.LongLength; ++i)
            {
                Assert.AreEqual(expectedTresholds[i], actualTresholds[i]);
            }

            var expectedMatrix = expected.WeightsMatrix;
            var actualMatrix   = actual.InternalWeights;

            Assert.AreEqual(expectedMatrix.GetLength(0), actualMatrix.LongLength);

            var pointer  = 0;
            var currCol  = 0L;
            var schema   = actual.Schema;
            var currLine = schema[pointer + 1];

            for (var i = 0; i < actualMatrix.LongLength; ++i)
            {
                if (i == currLine)
                {
                    currCol  += schema[pointer++];
                    currLine += schema[pointer + 1];
                }

                var actualLine = actualMatrix[i];
                for (var j = 0; j < actualLine.LongLength; ++j)
                {
                    var actualVal   = actualLine[j];
                    var expectedVal = expectedMatrix[i, currCol + j];
                    Assert.AreEqual(expectedVal, actualVal);
                }
            }
        }
Example #8
        public void FeedForwardNeuralNetwork_InternalComputeOutputs()
        {
            var target = new FeedForwardNeuralNetwork<double>(
                new[] { 2L, 3L, 2L });

            var parser = new DoubleParser<string>();
            var matrix = TestsHelper.ReadMatrix(
                5,
                5,
                "[[-1.0, 1.0, 0.5, 0, 0], [1.0, -1.0, 0.5, 0, 0], [0, 0, 0, -1.0, 2.0], [0, 0, 0, 0.5, -1.5], [0, 0, 0, 1.0, -0.5]]",
                (i, j) => new SparseDictionaryMatrix<double>(i, j, 0),
                parser,
                true);
            var vector = TestsHelper.ReadVector(
                5,
                "[0.5, 0.5, 0.5, 0.5, 0.5]",
                new SparseDictionaryMathVectorFactory<double>(),
                parser,
                true);
            var model = new NeuralNetworkModel<double, SparseDictionaryMatrix<double>, IMathVector<double>>(
                matrix,
                vector);

            target.LoadModel(model);

            var outputMatrix = target.InternalReserveOutput();

            target.InternalComputeLayerOutputs(
                new ArrayMathVector <double>(new[] { 1.0, -1.0 }),
                outputMatrix,
                (d1, d2) =>
            {
                if (d2 > d1)
                {
                    return 1.0;
                }
                else
                {
                    return 0.0;
                }
            },
                (u, v, l) =>
            {
                var result = 0.0;
                for (var i = 0L; i < l; ++i)
                {
                    result += u[i] * v[i];
                }

                return result;
            });

            Assert.AreEqual(target.Schema.LongCount() - 1L, outputMatrix.LongLength);

            var currOut = outputMatrix[0];

            Assert.AreEqual(0.0, currOut[0]);
            Assert.AreEqual(1.0, currOut[1]);
            Assert.AreEqual(0.0, currOut[2]);

            currOut = outputMatrix[1];
            Assert.AreEqual(0.0, currOut[0]);
            Assert.AreEqual(0.0, currOut[1]);
        }
Example #9
        public void FeedForwardNeuralNetwork_RunSimpleMatrixTest()
        {
            var target = new FeedForwardNeuralNetwork<double>(
                new[] { 2L, 3L, 2L });

            var parser = new DoubleParser<string>();
            var matrix = TestsHelper.ReadMatrix(
                5,
                5,
                "[[-1.0, 1.0, 0.5, 0, 0], [1.0, -1.0, 0.5, 0, 0], [0, 0, 0, -1.0, 2.0], [0, 0, 0, 0.5, -1.5], [0, 0, 0, 1.0, -0.5]]",
                (i, j) => new SparseDictionaryMatrix<double>(i, j, 0),
                parser,
                true);
            var vector = TestsHelper.ReadVector(
                5,
                "[0.5, 0.5, 0.5, 0.5, 0.5]",
                new SparseDictionaryMathVectorFactory<double>(),
                parser,
                true);
            var model = new NeuralNetworkModel<double, SparseDictionaryMatrix<double>, IMathVector<double>>(
                matrix,
                vector);

            target.LoadModel(model);

            var actual = target.Run(
                new[] { 1.0, 0.0 },
                (u, v, l) =>
            {
                var result = 0.0;
                for (var i = 0L; i < l; ++i)
                {
                    result += u[i] * v[i];
                }

                return result;
            },
                (d1, d2) =>
            {
                if (d2 > d1)
                {
                    return 1.0;
                }
                else
                {
                    return 0.0;
                }
            });
            var expected = new[] { 0.0, 0.0 };

            CollectionAssert.AreEqual(expected, actual);

            actual = target.Run(
                new[] { 0.0, 1.0 },
                (u, v, l) =>
            {
                var result = 0.0;
                for (var i = 0L; i < l; ++i)
                {
                    result += u[i] * v[i];
                }

                return result;
            },
                (d1, d2) =>
            {
                if (d2 > d1)
                {
                    return 1.0;
                }
                else
                {
                    return 0.0;
                }
            });
            expected = new[] { 0.0, 1.0 };

            CollectionAssert.AreEqual(expected, actual);

            actual = target.Run(
                new[] { 1.0, -1.0 },
                (u, v, l) =>
            {
                var result = 0.0;
                for (var i = 0L; i < l; ++i)
                {
                    result += u[i] * v[i];
                }

                return result;
            },
                (d1, d2) =>
            {
                if (d2 > d1)
                {
                    return 1.0;
                }
                else
                {
                    return 0.0;
                }
            });
            expected = new[] { 0.0, 0.0 };

            CollectionAssert.AreEqual(expected, actual);
        }
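
The three Run calls above pass the same pair of lambdas each time: a dot product between the inputs and a line of weights, and a step-style comparison that returns 1.0 when the second argument exceeds the first. As a standalone restatement (the array parameter types and the class name below are assumptions made for illustration; the library's actual delegate signatures are not shown in the test), the pair boils down to:

    // Standalone restatement of the two lambdas repeated in the test above.
    // Array parameter types and the class name are illustrative assumptions.
    internal static class RunDelegates
    {
        // Dot product of the first `length` entries of u and v.
        public static double DotProduct(double[] u, double[] v, long length)
        {
            var result = 0.0;
            for (var i = 0L; i < length; ++i)
            {
                result += u[i] * v[i];
            }

            return result;
        }

        // Returns 1.0 when the second value exceeds the first, otherwise 0.0,
        // mirroring the (d1, d2) lambdas used in each Run call.
        public static double StepActivation(double d1, double d2)
        {
            return d2 > d1 ? 1.0 : 0.0;
        }
    }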