Code Example #1
        public void ShouldInitializeHiddenLayerSizes()
        {
            var hidden = new[] { 2, 3, 2 };
            var nn     = new MultilayerPerceptron(1, 2, hidden);

            nn.HiddenLayerSizes.Should().Equal(hidden);
        }
Code Example #2
File: SampleInputs.cs Project: ikhramts/NNX
 public static MultilayerPerceptron GetSample1HiddenLayerPerceptron()
 {
     var nn = new MultilayerPerceptron(2, 2, new [] {3});
     nn.Weights[0] = new[] { 0.1, 0.2, 0.3, 0.11, 0.21, 0.31, 0.12, 0.22, 0.32 };
     nn.Weights[1] = new[] { 0.4, 0.41, 0.42, 0.43, 0.5, 0.51, 0.52, 0.53 };
     return nn;
 }
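
A note on the weight layout: assuming each node carries a bias term, these counts match the 2-3-2 topology being built here: (2 inputs + 1 bias) × 3 hidden nodes = 9 values in Weights[0], and (3 hidden nodes + 1 bias) × 2 outputs = 8 values in Weights[1].
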
Code Example #3
        public void XOR(int iterations, double minimumAccuracy)
        {
            var mlp = new MultilayerPerceptron <Tanh>(2, new int[] { 10, 10 }, 1);

            double[][] inputs = new double[][] {
                new double[] { 0, 0 },
                new double[] { 1, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 1 },
            };
            double[][] outputs = new double[][] {
                new double[] { 0 },
                new double[] { 1 },
                new double[] { 1 },
                new double[] { 0 },
            };

            double[] output = new double[1];
            for (int i = 0; i < iterations; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    mlp.Predict(inputs[j], output);
                    mlp.Train(outputs[j], 0.4, 0.9);
                }
            }

            for (int j = 0; j < inputs.Length; j++)
            {
                mlp.Predict(inputs[j], output);
                double diff = 1 - Math.Abs(output[0] - outputs[j][0]);
                Assert.GreaterOrEqual(diff, minimumAccuracy);
            }
        }
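
Each training epoch above runs a forward pass (Predict) followed by a weight update (Train) per sample; the two numeric arguments to Train are presumably the learning rate and the momentum.
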
Code Example #4
File: Utils.cs Project: sdg002/SimplePerceptron
 /// <summary>
 /// Internal function which will do the forward pass from the first layer to the last.
 /// Dot products and activations at each node will be computed and cached in the context object
 /// </summary>
 /// <param name="network">The MLP</param>
 /// <param name="ctx">The context wrapper over the training vector</param>
 internal static void DoForwardPass(
     MultilayerPerceptron network, VectorPropagationContext ctx)
 {
     for (int layerindex = 0; layerindex < network.Layers.Length; layerindex++)
     {
         Layer    layerCurrent   = network.Layers[layerindex];
         Layer    layerPrevious  = null;
         int      countOfNodes   = layerCurrent.Nodes.Length;
         double[] incomingvalues = null;//Values coming into this layer
         if (layerindex == 0)
         {
             //We are on the first layer, so the inputs are the vector itself
             incomingvalues = ctx.Vector.Inputs;
         }
         else
         {
             //We are on hidden layers, so the inputs are the activations of the previous layer
             layerPrevious  = network.Layers[layerindex - 1];
             incomingvalues = layerPrevious.Nodes.Select(nd => ctx.NodeActivationCache[nd.GetID()]).ToArray();
         }
         for (int nodeindex = 0; nodeindex < countOfNodes; nodeindex++)
         {
             Neuron node       = layerCurrent.Nodes[nodeindex];
             double dotproduct = 0;
             for (int i = 0; i < node.Weights.Length; i++)
             {
                 dotproduct = dotproduct + incomingvalues[i] * node.Weights[i].Value;
             }
             dotproduct += node.Bias.Value;
             ctx.NodeDotProductsCache[node.GetID()] = dotproduct;
             double activation = ComputeActivation(layerCurrent, node, dotproduct);
             ctx.NodeActivationCache[node.GetID()] = activation;
         }
     }
 }
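
In the usual notation, each node j in the loop above computes the dot product of the incoming values with its weights, adds its bias, and applies the layer's activation:

    z_j = \sum_i w_{ji} x_i + b_j,        a_j = f(z_j)

Both the dot product z_j and the activation a_j are cached in the context so that the backward pass can reuse them.
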
Code Example #5
File: RTLR.cs Project: nagyistoce/Neuroflow
        internal RTLR(MultilayerPerceptron mlp)
        {
            Debug.Assert(mlp != null);

            this.mlp = mlp;
            this.netValueDerivates = mlp.NetValueDerivates.Values.ToArray();

            CreatePValues(mlp);
        }
Code Example #6
File: Utils.cs Project: sdg002/SimplePerceptron
        /// <summary>
        /// This function computes the output values at the output layer using the given input vector and trained network
        /// </summary>
        /// <param name="network"></param>
        /// <param name="vector"></param>
        /// <returns></returns>
        public static double[] ComputeNetworkOutput(MultilayerPerceptron network, Vector vector)
        {
            VectorPropagationContext vectorContext = new VectorPropagationContext(vector);

            DoForwardPass(network, vectorContext);
            Layer layerLast = network.Layers.Last();

            double[] activationsFromLastLayer = layerLast.
                                                Nodes.Select(nd => vectorContext.NodeActivationCache[nd.GetID()]).ToArray();
            return(activationsFromLastLayer);
        }
Code Example #7
        public override void Run()
        {
            var nt = new MultilayerPerceptron(
                new PerceptronParameters(),
                new SigmoidActivation(),
                2, 3, 2);

            //Helper.PrintMatrix((double[][])nt.Neurons);
            Console.WriteLine("Weights: ");
            //Helper.PrintMatrix(nt.Weights);
        }
Code Example #8
File: Utils.cs Project: sdg002/SimplePerceptron
 /// <summary>
 /// Computes the delta (error term) at each node, working backward from the output layer
 /// </summary>
 /// <param name="network">The MLP</param>
 /// <param name="ctx">The context wrapper over the training vector</param>
 internal static void DoBackwardPassComputeDeltas(MultilayerPerceptron network, VectorPropagationContext ctx)
 {
     for (int layerindex = network.Layers.Length - 1; layerindex >= 0; layerindex--)
     {
         Layer layerCurrent = network.Layers[layerindex];
         Layer layerAhead   = null;
         int   countOfNodes = layerCurrent.Nodes.Length;
         if (layerindex == network.Layers.Length - 1)
         {
             //We are on the last layer
             double[] errorsOutputLayer = new double[countOfNodes];
             ctx.Outputs = new double[countOfNodes];
             for (int nodeindex = 0; nodeindex < countOfNodes; nodeindex++)
             {
                 Neuron nodeCurrent    = layerCurrent.Nodes[nodeindex];
                 double outputExpected = ctx.Vector.Outputs[nodeindex];
                 double outputActual   = ctx.NodeActivationCache[nodeCurrent.GetID()];
                 ctx.Outputs[nodeindex] = outputActual;
                 double errorAtNode = outputExpected - outputActual;
                 errorsOutputLayer[nodeindex] = errorAtNode;
                 double dotproduct = ctx.NodeDotProductsCache[nodeCurrent.GetID()];
                 double derivative = ComputeDerivativeOfActivation(layerCurrent, nodeCurrent, dotproduct, outputActual);
                 double deltaNode  = -errorAtNode * derivative;
                 ctx.NodeDeltaCache[nodeCurrent.GetID()] = deltaNode;
             }
             double mse = 0.5 * errorsOutputLayer.Select(e => e * e).Sum();
             ctx.MeanSquaredError = mse;
         }
         else
         {
             //We are on the intermediate layers
             layerAhead = network.Layers[layerindex + 1];
             int countOfNodesAhead = layerAhead.Nodes.Length;
             for (int nodeindex = 0; nodeindex < countOfNodes; nodeindex++)
             {
                 Neuron nodeCurrent       = layerCurrent.Nodes[nodeindex];
                 double activation        = ctx.NodeActivationCache[nodeCurrent.GetID()];
                 double dotproduct        = ctx.NodeDotProductsCache[nodeCurrent.GetID()];
                 double summationOfDeltas = 0.0;
                 //For every node ahead, sum up the weight and delta of that node
                 for (int nodeindex_ahead = 0; nodeindex_ahead < countOfNodesAhead; nodeindex_ahead++)
                 {
                     Neuron nodeAhead = layerAhead.Nodes[nodeindex_ahead];
                     double wt_from_layer_current_to_ahead = nodeAhead.Weights[nodeindex].Value;
                     double deltaNodeAhead = ctx.NodeDeltaCache[nodeAhead.GetID()];
                     summationOfDeltas += wt_from_layer_current_to_ahead * deltaNodeAhead;
                 }
                 double derivative = ComputeDerivativeOfActivation(layerCurrent, nodeCurrent, dotproduct, activation);
                 ctx.NodeDeltaCache[nodeCurrent.GetID()] = summationOfDeltas * derivative;
             }
         }
     }
 }
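
The deltas above follow standard backpropagation for a squared-error cost E = \frac{1}{2} \sum_k (t_k - a_k)^2: at the output layer each node's delta is the negated error scaled by the activation derivative, and at hidden layers the deltas of the layer ahead are pulled back through its weights:

    \delta_j^{out} = -(t_j - a_j) \, f'(z_j)
    \delta_j^{hidden} = \Big( \sum_k w_{kj} \, \delta_k \Big) \, f'(z_j)

Note that the value stored in MeanSquaredError is the summed squared error scaled by 1/2 for this one vector, not a mean over the dataset.
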
Code Example #9
        public void ShouldInitializeWeights(int numInputs, int numOutputs, int[] numHidden, int[] expectedWeightSizes)
        {
            var nn = new MultilayerPerceptron(numInputs, numOutputs, numHidden);
            nn.Weights.Should().NotBeNullOrEmpty();
            nn.Weights.Should().HaveCount(expectedWeightSizes.Length);

            for (int i = 0; i < nn.Weights.Length; i++)
            {
                nn.Weights[i].Should().HaveCount(expectedWeightSizes[i], $"Difference in layer {i}.");
            }
        }
Code Example #10
File: Utils.cs Project: sdg002/SimplePerceptron
 /// <summary>
 /// Randomizes the network weights and biases to values between -1 and +1
 /// </summary>
 /// <param name="network"></param>
 public static void RandomizeNetworkWeights(MultilayerPerceptron network)
 {
     Neuron[] allNodes = network.Layers.SelectMany(l => l.Nodes).ToArray();
     foreach (var node in allNodes)
     {
         foreach (var wt in node.Weights)
         {
             wt.Value = -1.0 + rnd.NextDouble() * 2.0;
         }
         node.Bias.Value = -1.0 + rnd.NextDouble() * 2.0;
     }
 }
Code Example #11
File: RTLR.cs Project: nagyistoce/Neuroflow
        private void CreatePValues(MultilayerPerceptron mlp)
        {
            int uLayersCount = mlp.Layers.Count - 1;
            for (int lidx = 1; lidx < mlp.Layers.Count; lidx++)
            {
                var layer = mlp.Layers[lidx];
                var biases = mlp.Biases[lidx];

                var pWeightValues = new LinkedList<Marshaled<IDeviceArray[]>[]>();

                // Biases:
                var pWeightValuesOfInput = new Marshaled<IDeviceArray[]>[biases.Size];

                for (int weightIndex = 0; weightIndex < biases.Size; weightIndex++)
                {
                    pWeightValuesOfInput[weightIndex] = mlp.AsMarshaled(new IDeviceArray[uLayersCount]);

                    for (int lidx2 = 0; lidx2 < uLayersCount; lidx2++)
                    {
                        pWeightValuesOfInput[weightIndex].Instance()[lidx2] = mlp.Adapter.DeviceArrayManagement.CreateArray(false, GetULayerSize(lidx2));
                    }
                }

                pWeightValues.AddLast(pWeightValuesOfInput);

                // Weighted conns:
                foreach (var inputConnectedLayer in layer.Layer.GetInputLayers())
                {
                    int inputIndex = mlp.GetLayerIndex(inputConnectedLayer);
                    var key = Tuple.Create(inputIndex, lidx);
                    var weights = mlp.Weights[key];

                    pWeightValuesOfInput = new Marshaled<IDeviceArray[]>[weights.Size];

                    for (int weightIndex = 0; weightIndex < weights.Size; weightIndex++)
                    {
                        pWeightValuesOfInput[weightIndex] = mlp.AsMarshaled(new IDeviceArray[uLayersCount]);
                        for (int lidx2 = 0; lidx2 < uLayersCount; lidx2++)
                        {
                            pWeightValuesOfInput[weightIndex].Instance()[lidx2] = mlp.Adapter.DeviceArrayManagement.CreateArray(false, GetULayerSize(lidx2));
                        }
                    }

                    pWeightValues.AddLast(pWeightValuesOfInput);
                }

                this.pWeightValues.Add(pWeightValues.ToArray());
            }
        }
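
RTLR here presumably stands for real-time recurrent learning, which maintains a sensitivity value (a "P value", roughly the derivative of each unit's net value with respect to each weight) for every weight of every non-input layer. That would explain why the allocation above is nested three levels deep: per layer, per weight (biases first, then weighted connections), per affected layer.
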
Code Example #12
        public Network CreateStandart(params int[] param)
        {
            /*var inputLayerNeuronsCount = 784;
            *  var hiddenLayerNeuronsCount = 100;
            *  var outputLayerNeuronsCount = 10;*/

            var inputLayerNeuronsCount  = param[0];
            var hiddenLayerNeuronsCount = param[1];
            var outputLayerNeuronsCount = param[2];

            ActivateFunction activateFunction           = (double x) => 1 / (1 + Math.Exp(-x));
            ActivateFunction activateFunctionDerivative = (double x) =>
            {
                double s = 1 / (1 + Math.Exp(-x));
                return s * (1 - s);
            };


            //normal distribution
            Normal normal = new Normal(0, Math.Pow(inputLayerNeuronsCount, -0.5));

            double[][] hiddenLayerWeights = new double[inputLayerNeuronsCount][];
            for (int i = 0; i < inputLayerNeuronsCount; ++i)
            {
                hiddenLayerWeights[i] = new double[hiddenLayerNeuronsCount];
                for (int j = 0; j < hiddenLayerNeuronsCount; ++j)
                {
                    hiddenLayerWeights[i][j] = normal.Sample();
                }
            }

            //normal distribution
            normal = new Normal(0, Math.Pow(hiddenLayerNeuronsCount, -0.5));
            double[][] outputLayerWeights = new double[hiddenLayerNeuronsCount][];
            for (int i = 0; i < hiddenLayerNeuronsCount; ++i)
            {
                outputLayerWeights[i] = new double[outputLayerNeuronsCount];
                for (int j = 0; j < outputLayerNeuronsCount; ++j)
                {
                    outputLayerWeights[i][j] = normal.Sample();
                }
            }

            var layers = new List <Layer>
            {
                new Layer(hiddenLayerNeuronsCount, hiddenLayerWeights, activateFunction, activateFunctionDerivative),
                new Layer(outputLayerNeuronsCount, outputLayerWeights, activateFunction, activateFunctionDerivative),
            };

            var networkToReturn = new MultilayerPerceptron(layers);

            return(networkToReturn);
        }
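
Worth noting: both weight matrices are sampled from a normal distribution with mean 0 and standard deviation fan_in^{-1/2} (Math.Pow(n, -0.5)), a common heuristic that keeps the initial pre-activations inside the sigmoid's sensitive range.
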
Code Example #13
File: Helper.cs Project: sdg002/SimplePerceptron
        /// <summary>
        /// Evaluates each vector on a trained network and returns the vectors whose
        /// quantized outputs do not match the expected outputs
        /// </summary>
        /// <param name="perceptron"></param>
        /// <param name="vectors"></param>
        /// <returns>The vectors that failed classification</returns>
        internal static Vector[] EvaluateVectors(MultilayerPerceptron perceptron, IEnumerable <Vector> vectors)
        {
            List <Vector> vectorsFailed = new List <Vector>();

            foreach (Perceptron.entity.Vector vector in vectors)
            {
                double[] outputs          = Perceptron.core.Utils.ComputeNetworkOutput(perceptron, vector);
                double[] outputsQuantized = outputs.Select(opt => (opt > 0.5) ? 1.0 : 0.0).ToArray();
                if (outputsQuantized.SequenceEqual(vector.Outputs) == false)
                {
                    vectorsFailed.Add(vector);
                }
            }
            return(vectorsFailed.ToArray());
        }
Code Example #14
        public void MLPSerialization()
        {
            MultilayerPerceptron mlp = new MLPGenerator().Create(new int[] { 2, 2, 1 });

            mlp.Reset(0, 1);
            MLPXMLSerializer     serializer = new MLPXMLSerializer();
            XDocument            xdoc       = serializer.Serialize(mlp);
            MultilayerPerceptron mlp2       = serializer.Deserialize(xdoc);

            Assert.AreEqual(mlp2.Structure.Elements.Length, mlp.Structure.Elements.Length);
            Assert.AreEqual(mlp2.Structure.Elements[3].GetDescription(), mlp.Structure.Elements[3].GetDescription());
            Assert.AreEqual(mlp2.Structure.Elements[0].Next[0].Weight.Value, mlp.Structure.Elements[0].Next[0].Weight.Value);
            Assert.AreEqual(((NeuronBase)mlp2.Structure.Elements[4]).Previous[0].Weight.Value, ((NeuronBase)mlp.Structure.Elements[4]).Previous[0].Weight.Value);
            Assert.AreEqual(((NeuronBase)mlp2.Structure.Elements[3]).Func.GetType(), ((NeuronBase)mlp.Structure.Elements[3]).Func.GetType());
        }
Code Example #15
        static void Main(string[] args)
        {
            string pathToDir = @".....Datasets\MNIST\";

            // Get the training set
            DatasetSplit dataset = CsvParser.Read(pathToDir + "mnist_train.csv");

            double[,] xTrain = dataset.X;
            int[] yTrain = dataset.Y;

            // Get the testing set
            dataset         = CsvParser.Read(pathToDir + "mnist_test.csv");
            double[,] xTest = dataset.X;
            int[] yTest = dataset.Y;

            // scale data
            xTrain = xTrain.Scale(denominator: 255);
            xTest  = xTest.Scale(denominator: 255);

            // convert output labels to one-hot vectors
            int[,] yTrainCategorial = DataHelper.LabelsToCategories(yTrain, numberOfCategories: 10);

            MultilayerPerceptron mlp = new MultilayerPerceptron(
                costFunction: new CrossEntropy(),
                optimizer: new GradientDescent(),
                regularization: new Regularization().L2,
                metrics: new MulticlassClassificationMetrics().Accuracy);

            mlp.Layers.Add(
                new Dense(numberOfInputs: 0, numberOfOutputs: 784, activation: new Sigmoid()));
            mlp.Layers.Add(
                new Dense(numberOfInputs: 784, numberOfOutputs: 30, activation: new Sigmoid()));
            mlp.Layers.Add(
                new Dense(numberOfInputs: 30, numberOfOutputs: 10, activation: new Sigmoid()));

            mlp.Fit(xTrain, yTrainCategorial, epochs: 400, learningRate: 0.01, lambda: 0.1);

            double accuracyTrain = mlp.Evaluate(xTrain, yTrain);

            Console.WriteLine("Accuracy of train data: {0}", accuracyTrain);

            double accuracy = mlp.Evaluate(xTest, yTest);

            Console.WriteLine("Accuracy of test data: {0}", accuracy);

            Console.ReadKey();
        }
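
LabelsToCategories expands each integer label into a one-hot row (e.g. 3 becomes [0,0,0,1,0,0,0,0,0,0]), presumably the target format the cross-entropy cost expects, while Evaluate is called with the raw integer labels.
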
Code Example #16
File: Utils.cs Project: sdg002/SimplePerceptron
        /// <summary>
        /// Creates a new multilayer perceptron network
        /// </summary>
        /// <param name="inputs">No of inputs received by the network</param>
        /// <param name="nodesperlayer">Specified the nodes per layer, starting with the first hidden layer</param>
        /// <returns></returns>
        public static MultilayerPerceptron CreateNetwork(int inputs, params int[] nodesperlayer)
        {
            if (inputs <= 0)
            {
                throw new ArgumentException("No of inputs should be positive");
            }
            if (nodesperlayer == null || nodesperlayer.Length == 0)
            {
                throw new ArgumentException("Nodes per layer cannot be empty", nameof(nodesperlayer));
            }
            var mlp = new MultilayerPerceptron();

            mlp.Inputs = inputs;
            List <Layer> layers = new List <Layer>();

            for (int layerindex = 0; layerindex < nodesperlayer.Length; layerindex++)
            {
                List <Neuron> nodes = new List <Neuron>();
                var           layer = new Layer();
                int countOfNodes    = nodesperlayer[layerindex];
                if (countOfNodes <= 0)
                {
                    throw new ArgumentException($"The count of nodes must be positive in every layer. Layer index={layerindex}");
                }
                layer.Comments = $"Layer={layerindex}";
                for (int nodeindex = 0; nodeindex < countOfNodes; nodeindex++)
                {
                    Neuron node = new Neuron();
                    if (layerindex == 0)
                    {
                        node.Weights = _CreateArrayOfWeights(inputs);
                    }
                    else
                    {
                        //Number of weights equals the number of nodes in the previous layer
                        node.Weights = _CreateArrayOfWeights(nodesperlayer[layerindex - 1]);
                    }
                    nodes.Add(node);
                    node.Bias = new Weight();
                }
                layer.Nodes = nodes.ToArray();
                layers.Add(layer);
            }
            mlp.Layers = layers.ToArray();
            return(mlp);
        }
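
A minimal usage sketch combining this with the other SimplePerceptron utilities shown in this listing; the Vector initialization is an assumption, since that type's constructor is not shown here:

        // Build a 2-input network with one hidden layer of 3 nodes and a single output node
        var mlp = Utils.CreateNetwork(2, 3, 1);

        // Start from random weights and biases in [-1, +1)
        Utils.RandomizeNetworkWeights(mlp);

        // Evaluate one input vector (hypothetical Vector initialization)
        var vector = new Vector { Inputs = new[] { 0.25, 0.75 } };
        double[] outputs = Utils.ComputeNetworkOutput(mlp, vector);
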
Code Example #17
        static private Arena PrepareArenaLynx(MultilayerPerceptron perceptron)
        {
            var arena = new Arena(width, height);
            var hare  = new Hare(arena);
            var lynx  = new Lynx(arena);

            hare.Other = lynx;
            lynx.Other = hare;

            arena.AddAnimal(hare);
            arena.AddAnimal(lynx);

            hare.Perceptron = HarePerceptron;
            lynx.Perceptron = perceptron;

            return(arena);
        }
Code Example #18
 protected void InitSimpleNeuralNetwork()
 {
     if (SimpleNeuralNetwork == null)
     {
         SimpleNeuralNetwork = new MultilayerPerceptron(
             new PerceptronParameters
             {
                 LearningSpeed = LearnParameters.Parameters.LearnSpeed,
                 Moment        = LearnParameters.Parameters.Moment
             },
             new SigmoidActivation(LearnParameters.Parameters.ActivationCoefficient),
             LearnParameters.CellParameters.Select(x => x.LengthOfInput).Append(1).ToArray());
         if (LoadNetwork)
         {
             SimpleNeuralNetwork.Load(NeuralNetworkName);
         }
     }
 }
Code Example #19
File: MLPTest.cs Project: dawmaj/neurals
        public void PulseManySameResultsTest()
        {
            MLPGenerator gen = new MLPGenerator();

            int[] layers            = new int[] { 4, 3, 2 };
            MultilayerPerceptron nn = gen.Create(layers, 1, new Sigmoid());

            double[] inp  = new double[] { -0.978, 2.34, 0.2, -0.33 };
            double[] res  = nn.Pulse(inp);
            double[] res2 = nn.Pulse(inp);
            double[] res3 = nn.Pulse(inp);
            Assert.AreEqual(res.Length, res2.Length);
            Assert.AreEqual(res.Length, res3.Length);
            for (int i = 0; i < res.Length; i++)
            {
                Assert.AreEqual(res[i], res2[i]);
                Assert.AreEqual(res[i], res3[i]);
            }
        }
Code Example #20
File: RTLR.cs Project: nagyistoce/Neuroflow
        internal RTLR(MultilayerPerceptron mlp)
        {
            Debug.Assert(mlp != null);

            this.mlp = mlp;
            this.netValueDerivates = mlp.AsMarshaled(mlp.NetValueDerivates.Values.ToArray());
            this.inputLayerInfos = mlp.AsMarshaled(
                (from lidx in Enumerable.Range(1, mlp.Layers.Count - 1)
                 let layer = mlp.Layers[lidx].Layer
                 select (from inputLayer in layer.GetInputLayers()
                         let iidx = mlp.GetLayerIndex(inputLayer)
                         select new RTLRLayerInfo
                         {
                             Index = iidx - 1,
                             Size = inputLayer.Size,
                             Weights = mlp.Weights[Tuple.Create(iidx, lidx)],
                             IsElementOfU = inputLayer != mlp.Layers[0].Layer
                         }).ToArray()).ToArray());

            CreatePValues();
        }
Code Example #21
        public double CompositionClassification()
        {
            //Get single Test Instance from CSV file
            CSVLoader loader = new CSVLoader();

            loader.setSource(new java.io.File("GetComposition.csv"));
            Instances testinstances = loader.getDataSet();

            testinstances.setClassIndex(testinstances.numAttributes() - 1);
            Instance sekarang = testinstances.lastInstance();

            //Load the saved model
            MultilayerPerceptron model = (MultilayerPerceptron)SerializationHelper.read("CompositionModel.model");

            //Classify actual test instance
            double clsValue = model.classifyInstance(sekarang);

            return(clsValue);
        }
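
This example drives what appears to be Weka's MultilayerPerceptron classifier from C#; the java.io.File argument suggests an IKVM-compiled Weka assembly. CSVLoader, SerializationHelper.read and classifyInstance are the standard Weka APIs for loading data, restoring a saved model and predicting a class value.
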
Code Example #22
        public static string TrainMultilayerPerceptron(
            [ExcelArgument(Description = "Name of perceptron object to create.")] string name,
            [ExcelArgument(Description = "Name of the trainer that will train this neural network.")] string trainerName,
            [ExcelArgument(Description = "Matrix of training inputs.")] object[,] inputs,
            [ExcelArgument(Description = "Matrix of training targets.")] object[,] targets,
            [ExcelArgument(Description = "Number of nodes in each hidden layer, not including biases. " +
                                         "Must be an array of integers.")] double[] hiddenLayerSizes)
        {
            var inputTargets = PrepareInputTargetSet(inputs, targets);

            var inputWidth  = inputs.GetLength(1);
            var targetWidth = targets.GetLength(1);
            var trainer     = ObjectStore.Get <ITrainer>(trainerName);

            var intHiddenLayerSizes = hiddenLayerSizes.Select(h => (int)h).ToArray();
            var nn = new MultilayerPerceptron(inputWidth, targetWidth, intHiddenLayerSizes);

            trainer.Train(inputTargets, nn);

            ObjectStore.Add(name, nn);
            return(name);
        }
Code Example #23
        public static string MakeMultilayerPerceptron(
            [ExcelArgument(Description = "Name of perceptron object to create.")] string name,
            [ExcelArgument(Description = "Number of input nodes, not including input bias.")] int numInputs,
            [ExcelArgument(Description = "Number of output nodes.")] int numOutputs,
            [ExcelArgument(Description = "Number of nodes in each hidden layer, not including biases. " +
                                         "Must be an array of integers.")] double[] hiddenLayerSizes,
            [ExcelArgument(Description = "Weights object created using =nnMakeWeights().")] string weights)
        {
            var nn = new MultilayerPerceptron(numInputs, numOutputs, hiddenLayerSizes.ToIntArray());

            double[][] weightValues;

            if (!ObjectStore.TryGet(weights, out weightValues))
            {
                throw new NNXException("Argument Weights should be a weights object created using nnMakeWeights().");
            }

            if (weightValues.Length != nn.Weights.Length)
            {
                throw new NNXException($"Argument Weights was expected to have {nn.Weights.Length} " +
                                       $"layers; had: {weightValues.Length}.");
            }

            for (var layer = 0; layer < weightValues.Length; layer++)
            {
                if (weightValues[layer].Length != nn.Weights[layer].Length)
                {
                    throw new NNXException($"Argument Weights was expected to have {nn.Weights[layer].Length} " +
                                           $"values in layer {layer + 1}; had: {weightValues[layer].Length}.");
                }
            }

            weightValues.DeepCopyTo(nn.Weights);

            ObjectStore.Add(name, nn);
            return(name);
        }
Code Example #24
File: Program.cs Project: mannu598/DlibDotNet
        private static void Main()
        {
            // The mlp takes column vectors as input and gives column vectors as output.  The dlib::matrix
            // object is used to represent the column vectors. So the first thing we do here is declare
            // a convenient typedef for the matrix object we will be using.

            // This typedef declares a matrix with 2 rows and 1 column.  It will be the
            // object that contains each of our 2 dimensional samples.   (Note that if you wanted
            // more than 2 features in this vector you can simply change the 2 to something else)
            //typedef matrix<double, 2, 1 > sample_type;

            // make an instance of a sample matrix so we can use it below
            using (var sample = new SampleType(2, 1))
            {
                // Create a multi-layer perceptron network.   This network has 2 nodes on the input layer
                // (which means it takes column vectors of length 2 as input) and 5 nodes in the first
                // hidden layer.  Note that the other 4 variables in the mlp's constructor are left at
                // their default values.
                using (var net = new MultilayerPerceptron <Kernel1>(2, 5))
                {
                    // Now let's put some data into our sample and train on it.  We do this
                    // by looping over 41*41 points and labeling them according to their
                    // distance from the origin.
                    for (var i = 0; i < 1000; ++i)
                    {
                        for (var r = -20; r <= 20; ++r)
                        {
                            for (var c = -20; c <= 20; ++c)
                            {
                                sample[0] = r;
                                sample[1] = c;

                                // if this point is less than 10 from the origin
                                if (Math.Sqrt((double)r * r + c * c) <= 10)
                                {
                                    net.Train(sample, 1);
                                }
                                else
                                {
                                    net.Train(sample, 0);
                                }
                            }
                        }
                    }

                    // Now we have trained our mlp.  Let's see how well it did.
                    // Note that if you run this program multiple times you will get different results. This
                    // is because the mlp network is randomly initialized.

                    // each of these statements prints out the output of the network given a particular sample.

                    sample[0] = 3.123;
                    sample[1] = 4;
                    using (var ret = net.Operator(sample))
                        Console.WriteLine($"This sample should be close to 1 and it is classified as a {ret}");

                    sample[0] = 13.123;
                    sample[1] = 9.3545;
                    using (var ret = net.Operator(sample))
                        Console.WriteLine($"This sample should be close to 0 and it is classified as a {ret}");

                    sample[0] = 13.123;
                    sample[1] = 0;
                    using (var ret = net.Operator(sample))
                        Console.WriteLine($"This sample should be close to 0 and it is classified as a {ret}");
                }
            }
        }
Code Example #25
        static private MultilayerPerceptron TrainLynx()
        {
            Dictionary <MultilayerPerceptron, double> perceptronList = new Dictionary <MultilayerPerceptron, double>();

            for (int i = 0; i < 1000; i++)
            {
                var perceptron = new MultilayerPerceptron(4, 4, 2);
                perceptron.RandomWeights(50);
                perceptronList.Add(perceptron, RunOnce(PrepareArenaLynx(perceptron)));
            }

            //Get the 10 fastest perceptrons in the first generation
            Dictionary <MultilayerPerceptron, double> tenFastestPerceptrons = perceptronList.OrderBy(pair => pair.Value)
                                                                              .Take(10)
                                                                              .ToDictionary(pair => pair.Key, pair => pair.Value);
            double bestTime       = double.MaxValue;
            var    bestPerceptron = new MultilayerPerceptron(4, 4, 2); // Pick a random perceptron

            //Find the best (lowest) time among the ten fastest
            foreach (KeyValuePair <MultilayerPerceptron, double> entry in tenFastestPerceptrons)
            {
                if (entry.Value < bestTime)
                {
                    bestTime       = entry.Value;
                    bestPerceptron = entry.Key;
                }
            }
            Debug.WriteLine("1\t" + bestTime);

            //Continue making new generations until the best time stops decreasing
            int  genNumber     = 2;
            bool timeDecreased = false;
            Dictionary <MultilayerPerceptron, double> newGenerationPerceptronList = new Dictionary <MultilayerPerceptron, double>();

            do
            {
                timeDecreased = false;
                //Empty the dictionary
                newGenerationPerceptronList.Clear();
                //Create 100 perceptrons based on each of the 10 fastest perceptrons
                foreach (KeyValuePair <MultilayerPerceptron, double> entry in tenFastestPerceptrons)
                {
                    for (int i = 0; i < 100; i++)
                    {
                        var perceptron     = entry.Key.RandomClone(1);
                        var perceptronTime = RunOnce(PrepareArenaLynx(perceptron));
                        newGenerationPerceptronList.Add(perceptron, perceptronTime);
                    }
                }
                //Take the 10 fastest perceptrons from the new generation
                tenFastestPerceptrons = newGenerationPerceptronList.OrderBy(pair => pair.Value)
                                        .Take(10)
                                        .ToDictionary(pair => pair.Key, pair => pair.Value);

                //Find the best (lowest) time in the ten fastest
                foreach (KeyValuePair <MultilayerPerceptron, double> entry in tenFastestPerceptrons)
                {
                    if (entry.Value < bestTime)
                    {
                        bestTime       = entry.Value;
                        bestPerceptron = entry.Key;
                        timeDecreased  = true;
                    }
                }
                Debug.WriteLine(genNumber + "\t" + bestTime);
                genNumber += 1;
            } while (timeDecreased);

            //Dr. Dong's code
            //var perceptron = new Perceptron(4, 2);
            //// Random starting point
            //perceptron.RandomWeights(50);
            LynxPerceptron = bestPerceptron;
            return(bestPerceptron);
        }
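
The training scheme here is a simple evolutionary search rather than backpropagation: evaluate 1000 randomly weighted perceptrons, keep the 10 with the lowest times, spawn 100 clones of each (RandomClone(1) presumably perturbs the weights), and repeat until the best time stops decreasing.
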
Code Example #26
        public override void Run()
        {
            var nt = new MultilayerPerceptron(
                new PerceptronParameters {
                LearningSpeed = 0.7, Moment = 0.3
            },
                new SigmoidActivation(4),
                2, 2, 1);

            //Helper.PrintMatrix(nt.Neurons);

            /*nt.Weights = new double[2][][];
             * nt.Weights[0] = new double[3][] {
             *      new double[] { 0.1, 0.2 },
             *      new double[] { 0.3, 0.4 },
             *      new double[] { 0.5, 0.6 },
             * };
             * nt.Weights[1] = new double[1][] {
             *      new double[] { 0.7, 0.9, 0.11 },
             *      //new double[] { 0.8, 0.14, 0.12 }
             * };*/
            Console.WriteLine("Weights: ");
            //Helper.PrintMatrix(nt.Weights);
            var learn = new double[12][] {
                new [] { 0.018088, 0.01591 },
                new [] { 0.0248, -0.00912 },
                new [] { -0.013727, 0.00502 },
                new [] { -0.023491, 0.007678 },
                new [] { -0.011982, 0.025521 },
                new [] { 0.00835, -0.0316 },
                new [] { 0.041049, -0.041505 },
                new [] { 0.050914, -0.046292 },
                new [] { 0.076138, -0.106684 },
                new [] { 0.131035, -0.092031 },
                new [] { 0.206694, -0.209201 },
                new [] { 0.168238, -0.211099 }
            };
            var middleError = 0.0;

            for (var k = 1; k < 10000000; k++)
            {
                Vector result = null;
                Vector error  = null;
                var    st     = new Stopwatch();
                st.Start();
                for (var j = 0; j < 1000; j++)
                {
                    for (var i = 0; i < learn.Length - 1; i++)
                    {
                        (result, error) = nt.Learn(learn[i], new double[] { learn[i + 1][0] });
                        middleError    += error[0];
                        //Console.WriteLine(result.Error[0] + "_");
                    }
                }
                st.Stop();
                Console.Clear();
                Console.WriteLine(st.Elapsed);
                Console.WriteLine(middleError / nt.Epoch);
                Console.WriteLine(error[0]);
                for (var i = 0; i < learn.Length - 1; i++)
                {
                    var res     = nt.Run(learn[i]);
                    var result1 = nt.ConvertOutput(res);
                    Console.WriteLine(result1[0] + "\t" + learn[i + 1][0]);
                }
            }

            middleError = middleError / (10000 * 11);
            Console.WriteLine(middleError + "_");
        }
Code Example #27
        public void ShouldInitializeNumOutputs()
        {
            var nn = new MultilayerPerceptron(1, 2, new[] { 3 });

            Assert.Equal(2, nn.NumOutputs);
        }
Code Example #28
        public void Iris(int iterations, double minimumAccuracy)
        {
            string[] lines = File.ReadAllLines("./Data/iris.csv");

            double[][] inputs  = new double[lines.Length][];
            double[][] outputs = new double[lines.Length][];
            for (int i = 0; i < lines.Length; i++)
            {
                inputs[i]  = new double[4];
                outputs[i] = new double[1];
                string[] values = lines[i].Split(',');

                for (int j = 0; j < 4; j++)
                {
                    inputs[i][j] = double.Parse(values[j]);
                }

                outputs[i][0] = Map(values[4]);
            }

            for (int i = 0; i < 4; i++)
            {
                double min = double.PositiveInfinity;
                double max = double.NegativeInfinity;
                for (int j = 0; j < inputs.Length; j++)
                {
                    if (inputs[j][i] < min)
                    {
                        min = inputs[j][i];
                    }
                    if (inputs[j][i] > max)
                    {
                        max = inputs[j][i];
                    }
                }

                for (int j = 0; j < inputs.Length; j++)
                {
                    inputs[j][i] = Normalize((inputs[j][i] - min) / (max - min));
                }
            }

            var mlp = new MultilayerPerceptron <Tanh>(4, new int[] { 10, 10 }, 1);

            double normalizedCorrect = 0;

            double[] output = new double[1];
            for (int i = 0; i < iterations; i++)
            {
                int correct = 0;
                for (int j = 0; j < inputs.Length; j++)
                {
                    mlp.Predict(inputs[j], output);
                    mlp.Train(outputs[j], 0.1, 0.4);

                    double expectedOutput = outputs[j][0];
                    if (Map(expectedOutput) == Map(output[0]))
                    {
                        correct++;
                    }
                }

                normalizedCorrect = (double)correct / inputs.Length;
            }

            Assert.GreaterOrEqual(normalizedCorrect, minimumAccuracy);
        }
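
The Map and Normalize helpers are not shown in this example. A plausible sketch, assuming the Tanh network works with values in [-1, 1] and the three iris species are encoded as -1, 0 and +1; these implementations are guesses, not the project's actual code:

        // Hypothetical helpers; the real project may encode the classes differently.
        static double Normalize(double zeroToOne) => zeroToOne * 2 - 1; // [0, 1] -> [-1, 1]

        static double Map(string species)
        {
            switch (species)
            {
                case "Iris-setosa":     return -1.0;
                case "Iris-versicolor": return  0.0;
                default:                return  1.0; // Iris-virginica
            }
        }

        static int Map(double output) => output < -0.5 ? -1 : (output > 0.5 ? 1 : 0);
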
Code Example #29
        static void Main(string[] args)
        {
            
            #region Linear Regression

            Console.WriteLine("Linear Regression");
            double[] input = new double[] { -2, -1, 1, 4 };
            double[] output = new double[] { -3, -1, 2, 3 };
            LinearRegression linearRegression = new LinearRegression();
            linearRegression.Training(input, output);
            Console.WriteLine("Result: " + Math.Round(linearRegression.Run(0.5d),2));
            Console.WriteLine("Coefficient Determination: " + Math.Round(linearRegression.CoefficientDetermination,2));
            Console.WriteLine("--------------------------");

            #endregion

            #region Multiple Linear Regression

            double[,] inputTrain = { { 2d, 3d }, { 2.5d, 2d }, { 1.8d, 4d } };
            double[] outputTrain = { 5d, 6d, 4d };
            MultipleLinearRegression mlr = new MultipleLinearRegression(inputTrain.GetLength(1), 0.5d);
            mlr.Training(inputTrain, outputTrain);
            Console.WriteLine("Multiple Linear Regression");
            Console.WriteLine("Result: " + Math.Round(mlr.Run(new[] { 2.6d, 2.1d }), 2));
            Console.WriteLine("--------------------------");

            #endregion

            #region Perceptron

            Console.WriteLine("Perceptron");

            #region AND Gate
            double[,] inputAnd = new double[,] { { 1, 0 }, { 1, 1 }, { 0, 1 }, { 0, 0 } };
            int[] outputAnd = new int[] { 0, 1, 0, 0 };

            Perceptron p1 = new Perceptron();
            p1.Training(inputAnd, outputAnd);
            
            Console.WriteLine("AND Gate");
            Console.WriteLine("Iteration of training: " + p1.Iteration);
            Console.WriteLine("Test 1: " + p1.Run(new double[,] { { 1, 0 } }));
            Console.WriteLine("Test 2: " + p1.Run(new double[,] { { 1, 1 } }));
            #endregion

            #region OR Gate
            double[,] inputOr = new double[,] { { 1, 0 }, { 1, 1 }, { 0, 1 }, { 0, 0 } };
            int[] outputOr = new int[] { 1, 1, 1, 0 };

            Perceptron p2 = new Perceptron();
            p2.Training(inputOr, outputOr);
            Console.WriteLine("OR Gate");
            Console.WriteLine("Iteration of training: " + p2.Iteration);
            Console.WriteLine("Test 1: " + p2.Run(new double[,] { { 0, 1 } }));
            Console.WriteLine("Test 2: " + p2.Run(new double[,] { { 0, 0 } }));
            #endregion
            
            Console.WriteLine("--------------------------");

            #endregion

            #region Multilayer Perceptron
            Console.WriteLine("Multilayer Perceptron");
            MultilayerPerceptron mlp = new MultilayerPerceptron(2, 5, 1);
            mlp.Training(new double[,] { { 1, 1 }, { 1, 0 }, { 0, 0 }, { 0, 1 } }, new double[] { 1, 1, 0, 1 });
            Console.WriteLine("OR Gate: " + Math.Round(mlp.Run(new double[] { 0, 1 }).FirstOrDefault(), 1));
            Console.WriteLine("--------------------------");
            #endregion

            Console.ReadKey();
        }