コード例 #1
0
        public void DenseLayer_CopyLayerForPredictionModel()
        {
            // Arrange: a dense layer initialized for a single-sample batch.
            const int batchSize = 1;
            const int neuronCount = 5;
            var random = new Random(232);

            var source = new DenseLayer(neuronCount, Activation.Undefined);
            source.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            // Act: copy the layer into a fresh list as done for prediction models.
            var predictionLayers = new List<ILayer>();
            source.CopyLayerForPredictionModel(predictionLayers);

            // Assert: exactly one layer was added and it mirrors the source.
            var copy = (DenseLayer)predictionLayers.Single();

            Assert.AreEqual(source.Width, copy.Width);
            Assert.AreEqual(source.Height, copy.Height);
            Assert.AreEqual(source.Depth, copy.Depth);

            MatrixAsserts.AreEqual(source.Weights, copy.Weights);
            MatrixAsserts.AreEqual(source.Bias, copy.Bias);

            Assert.AreEqual(source.OutputActivations.RowCount, copy.OutputActivations.RowCount);
            Assert.AreEqual(source.OutputActivations.ColumnCount, copy.OutputActivations.ColumnCount);
        }
コード例 #2
0
        /// <summary>
        /// Builds a three-dense-layer (1-unit each) network with fixed weights
        /// for deterministic tests. Defaults to a linear activation mock.
        /// </summary>
        public static ConvNet SimpleLinearNetwork(IActivationFunction activation = null)
        {
            // Fall back to the linear (identity) mock when no activation is supplied.
            if (activation == null)
            {
                activation = new Mocks.LinearActivation();
            }

            var net = new ConvNet(1, 1, activation: activation);
            net.IsTraining = true;

            var layer1 = new DenseLayer(1);
            net.AddLayer(layer1);

            var layer2 = new DenseLayer(1);
            net.AddLayer(layer2);

            var layer3 = new DenseLayer(1);
            net.AddLayer(layer3);

            net._Build();

            // Fixed parameters so test outputs are reproducible.
            layer1.Weights[1] = 1;
            layer1.Weights[0] = 3;
            layer2.Weights[1] = -1;
            layer2.Weights[0] = 1;
            layer3.Weights[1] = 2;
            layer3.Weights[0] = -1;

            return net;
        }
コード例 #3
0
        /// <summary>
        /// Builds a deterministic test network: three 1-unit dense layers with a
        /// dropout layer (fully-open fixed mask) inserted before the last one.
        /// </summary>
        public static ConvNet SimpleLinearNetworkWithDropout(double drate, int dseed)
        {
            var net = new ConvNet(1, 1, activation: new Mocks.LinearActivation());
            net.IsTraining = true;

            var layer1 = new DenseLayer(1);
            net.AddLayer(layer1);

            var layer2 = new DenseLayer(1);
            net.AddLayer(layer2);

            // Dropout with a pre-set 1x1x1 all-true mask so the pass-through is
            // deterministic regardless of drate/dseed.
            var dropout = new DropoutLayer(drate, dseed);
            dropout.Mask = new bool[1][, ] {
                new bool[, ] {
                    { true }
                }
            };
            net.AddLayer(dropout);

            var layer4 = new DenseLayer(1);
            net.AddLayer(layer4);

            net._Build();

            // Fixed parameters so test outputs are reproducible.
            layer1.Weights[1] = 1;
            layer1.Weights[0] = 3;
            layer2.Weights[1] = -1;
            layer2.Weights[0] = 1;
            layer4.Weights[1] = 2;
            layer4.Weights[0] = -1;

            return net;
        }
コード例 #4
0
        /// <summary>
        /// Produces a mutated copy of the given network: each weight is, with
        /// <paramref name="mutationProbability"/> probability, offset by a sample
        /// from a zero-mean Gaussian with the given variance.
        /// </summary>
        /// <param name="network">Network to be mutated.</param>
        /// <param name="mutationProbability">Per-gene probability of mutation.</param>
        /// <param name="variance">Variance of the Gaussian noise.</param>
        /// <returns>The mutated network.</returns>
        private NeuralNetwork GaussianMutation(NeuralNetwork network, float mutationProbability, float variance)
        {
            DenseLayer[] sourceLayers = network.GetLayers();
            var mutatedLayers = new DenseLayer[sourceLayers.Length];

            for (int k = 0; k < mutatedLayers.Length; k++)
            {
                DenseLayer source = sourceLayers[k];
                // +1 row — presumably the bias weights; matches the layer's indexer range.
                var weights = new double[source.InputNeuronsCount + 1, source.OutputNeuronsCount];

                for (int i = 0; i < weights.GetLength(0); i++)
                {
                    for (int j = 0; j < weights.GetLength(1); j++)
                    {
                        // NextGaussian is consumed only when the mutation fires,
                        // preserving the original RNG sequence.
                        weights[i, j] = random.NextDouble() < mutationProbability
                            ? source[i, j] + random.NextGaussian(0, variance)
                            : source[i, j];
                    }
                }

                mutatedLayers[k] = new DenseLayer(weights, source.GetActivationFunction());
            }

            return new NeuralNetwork(mutatedLayers);
        }
コード例 #5
0
        /// <summary>
        /// Adds a layer to the network's layer list. An output layer is preceded
        /// by an implicit dense layer of matching depth; a layer requesting batch
        /// normalization is followed by a batch-normalization layer; a layer with
        /// a defined activation is followed by an activation layer.
        /// </summary>
        /// <param name="layer">The layer to add.</param>
        public void Add(ILayer layer)
        {
            // FIX: removed `unitsOfPreviousLayer`, which was computed from the
            // last layer's Width but never used anywhere in this method.

            if (layer is IOutputLayer)
            {
                // Insert a dense layer sized to the output layer's depth.
                var denseLayer = new DenseLayer(layer.Depth, Activation.Undefined);
                Layers.Add(denseLayer);
            }

            Layers.Add(layer);

            // consider adding separate interface for batch normalization
            if (layer is IBatchNormalizable normalizable && normalizable.BatchNormalization)
            {
                Layers.Add(new BatchNormalizationLayer());
            }

            // Activations are modeled as their own layers in this network.
            if (layer.ActivationFunc != Activation.Undefined)
            {
                Layers.Add(new ActivationLayer(layer.ActivationFunc));
            }
        }
コード例 #6
0
        public void TrainingLayer()
        {
            // One training sample: 4 inputs, 3 target outputs.
            double[][] inputs  = new double[][] { new double[] { 0, 1, 0, 1 } };
            double[][] outputs = new double[][] { new double[] { 0, 1, 0 } };

            var layer = new DenseLayer(inputs[0].Length, outputs[0].Length, new Relu(), new Distance());

            for (int iter = 0; iter < 20; iter++)
            {
                // Randomize the input vector for this iteration.
                for (int i = 0; i < inputs[0].Length; i++)
                {
                    inputs[0][i] = rnd.NextDouble();
                }

                // Train until the error target is met or 1000 steps elapse.
                const double errorTarget = 0.0001;
                double error = double.MaxValue;
                int step = 0;
                while (step < 1000 && error > errorTarget)
                {
                    error = layer.Train(inputs[0], outputs[0], 0.05);
                    step++;
                }
                Console.WriteLine($"Iteration: {iter} Step: {step} Error: {error}");
            }
        }
コード例 #7
0
        /// <summary>
        /// Allocates the per-layer moment, gradient-scratch, and step buffers,
        /// each zero-initialized with the same shape as the corresponding
        /// weight/bias gradient matrix.
        /// </summary>
        private void InitContainers(DenseLayer[] layers)
        {
            int n = layers.Length;

            // Shorthand: a zero matrix shaped like the given template.
            Func<Matrix<float>, Matrix<float>> zerosLike =
                m => DenseMatrix.Create(m.RowCount, m.ColumnCount, 0);

            Wms     = new Matrix<float>[n];
            Wvs     = new Matrix<float>[n];
            Wms_hat = new Matrix<float>[n];
            Wvs_hat = new Matrix<float>[n];
            Wgs     = new Matrix<float>[n];
            Wsteps  = new Matrix<float>[n];
            bms     = new Matrix<float>[n];
            bvs     = new Matrix<float>[n];
            bms_hat = new Matrix<float>[n];
            bvs_hat = new Matrix<float>[n];
            bgs     = new Matrix<float>[n];
            bsteps  = new Matrix<float>[n];

            for (int i = 0; i < n; i++)
            {
                Matrix<float> Wg = layers[i].GradWeights.Vals;
                Matrix<float> bg = layers[i].GradBias.Vals;

                Wms[i]     = zerosLike(Wg);
                Wvs[i]     = zerosLike(Wg);
                Wms_hat[i] = zerosLike(Wg);
                Wvs_hat[i] = zerosLike(Wg);
                Wgs[i]     = zerosLike(Wg);
                Wsteps[i]  = zerosLike(Wg);
                bms[i]     = zerosLike(bg);
                bvs[i]     = zerosLike(bg);
                bms_hat[i] = zerosLike(bg);
                bvs_hat[i] = zerosLike(bg);
                bgs[i]     = zerosLike(bg);
                bsteps[i]  = zerosLike(bg);
            }
        }
コード例 #8
0
    // Start is called before the first frame update.
    // Builds a single-dense-layer network for 28x28 image inputs, trains it on
    // data/labels read from disk (logging accuracy after each epoch), then
    // evaluates it on the training set.
    void Start()
    {
        var network = new NeuralNetwork();
        // Dense layer with output shape (1, 10, 1) and sigmoid activation.
        var d1      = new DenseLayer(new Shape(1, 10, 1), ActivationType.Sigmoid);

        network.AddLayer(d1);

        var initArgs = new NetworkInitializeArgs();

        initArgs.inputShape      = new Shape(1, 28 * 28, 1); // flattened 28x28 image input
        initArgs.initWeightRange = (-0.1f, 0.1f);            // weights drawn from [-0.1, 0.1]
        initArgs.initBiasRange   = (-0.1f, 0.1f);            // biases drawn from [-0.1, 0.1]
        network.Initialize(initArgs);

        var trainArgs = new NeuralNetworkTrainArgs();

        trainArgs.trainingData   = ReadTrainingData();   // set the training data
        trainArgs.trainingLabels = ReadTrainingLabels(); // set the training labels
        trainArgs.learningRate   = 0.01f;                // larger = faster learning but higher divergence risk
        trainArgs.onOnceEpoch    = (i) =>
        {
            // Per-epoch callback: log the accuracy on the training set.
            var accuracy = GetAccuracy(network, trainArgs.trainingData, trainArgs.trainingLabels);
            Debug.Log($"第{i}个训练回合, 准确率:{accuracy}");
        };
        trainArgs.trainEpoches = 100; // number of training epochs
        network.Train(trainArgs);     // start training

        TestNetwork(network, trainArgs.trainingData, trainArgs.trainingLabels);
    }
コード例 #9
0
    /// <summary>
    /// Creates and initializes the network used by the car demo: a single dense
    /// layer mapping the ray-cast inputs to 2 sigmoid outputs.
    /// </summary>
    private NeuralNetwork CreateNewNetwork()
    {
        // Create the neural network container first.
        var network = new NeuralNetwork();
        // (Optional hidden layer, output shape (1, 10, 1), sigmoid — disabled:)
        //var d1 = new DenseLayer(new Shape(1, 10, 1), ActivationType.Sigmoid);
        //network.AddLayer(d1);
        // Final dense layer: output shape (1, 2, 1) — being the last layer, its
        // shape must match the desired network output; sigmoid activation.
        var d2 = new DenseLayer(new Shape(1, 2, 1), ActivationType.Sigmoid);

        network.AddLayer(d2);

        // Initialization parameters.
        var initArgs = new NetworkInitializeArgs();

        // Input shape: one value per ray cast by the car (this is the car demo).
        initArgs.inputShape = new Shape(1, rayNum, 1);
        // Weight initialization range: weight = Random.Range(-0.1f, 0.1f)
        initArgs.initWeightRange = (-0.1f, 0.1f);
        // Bias initialization range: bias = Random.Range(-0.1f, 0.1f)
        initArgs.initBiasRange = (-0.1f, 0.1f);
        // Apply the initialization.
        network.Initialize(initArgs);
        return(network);
    }
コード例 #10
0
ファイル: Population.cs プロジェクト: aramayyes/NNDrives
        /// <summary>
        /// Initializes a new instance of <see cref="Population"/> with random chromosomes.
        /// </summary>
        /// <param name="count">The number of chromosomes in the new Population.</param>
        /// <param name="inputsLength">The length of the input vector of neural network.</param>
        /// <param name="idGenerator">A function which takes the chromosome's order number in population
        /// and returns an id for that chromosome.</param>
        /// <returns>An instance of <see cref="Population"/> with <paramref name="count"/> random chromosomes.</returns>
        public static Population GenerateRandomPopulation(int count, int inputsLength, Func <int, string> idGenerator)
        {
            // Hidden layer size heuristic: (inputs + outputs) / 2, with 2 outputs.
            int hiddenLength = (inputsLength + 2) / 2;

            System.Random random = new System.Random();
            var chromosomes = new List<Chromosome>();

            for (int i = 0; i < count; i++)
            {
                // Two sigmoid dense layers with randomly initialized weights.
                var network = new NeuralNetwork(new DenseLayer[]
                {
                    new DenseLayer(inputsLength, hiddenLength, LayerActivationFunctions.Sigmoid, random.NextDouble),
                    new DenseLayer(hiddenLength, 2, LayerActivationFunctions.Sigmoid, random.NextDouble)
                });

                // Fall back to a 1-based ordinal id when no generator is supplied.
                string chromosomeId = idGenerator == null
                    ? (i + 1).ToString()
                    : idGenerator(i + 1);

                chromosomes.Add(new Chromosome(chromosomeId, network));
            }

            return new Population(chromosomes);
        }
コード例 #11
0
        public void DenseLayer_Backward()
        {
            // Backward pass of a dense layer with a fixed RNG seed; the expected
            // values below are regression-pinned to seed 232, so the exact order
            // of RNG consumption in this test must not change.
            const int fanIn       = 5;
            const int batchSize   = 2;
            const int neuronCount = 3;
            var       random      = new Random(232);

            var sut = new DenseLayer(neuronCount, Activation.Undefined);

            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            // Forward is run before Backward (Backward presumably uses state
            // cached by Forward — project convention; confirm in DenseLayer).
            var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next());

            sut.Forward(input);

            var delta = Matrix <float> .Build.Random(batchSize, neuronCount, random.Next());

            var actual = sut.Backward(delta);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            // Expected input gradients (column-major), recorded from a known-good run.
            var expected = Matrix <float> .Build.Dense(batchSize, fanIn, new float[] { 0.001748383f, -0.2615477f, -0.6422306f, -0.01443626f, 0.4605991f, -0.7384186f, -0.6931117f, 0.1083627f, -0.6230267f, -1.20742f });

            MatrixAsserts.AreEqual(expected, actual);
        }
コード例 #12
0
        public void DenseLayer_MultipleBackwardsPasses()
        {
            // Verifies Backward is repeatable: calling it many times with the
            // same delta must keep producing the result of the first call.
            const int fanIn       = 5;
            const int batchSize   = 2;
            const int neuronCount = 3;
            var       random      = new Random(232);

            var sut = new DenseLayer(neuronCount, Activation.Undefined);

            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next());

            sut.Forward(input);

            // All-ones delta; the first Backward result becomes the baseline.
            var delta = Matrix <float> .Build.Dense(batchSize, neuronCount, 1.0f);

            var expected = Matrix <float> .Build.Dense(batchSize, fanIn);

            // CopyTo snapshots the baseline so later calls can't mutate it in place.
            sut.Backward(delta).CopyTo(expected);

            for (int i = 0; i < 20; i++)
            {
                var actual = sut.Backward(delta);
                Assert.AreEqual(expected, actual);
            }
        }
コード例 #13
0
 /// <summary>
 /// Wires up the layers this visualizer renders and instantiates the visual
 /// prefabs from Resources.
 /// </summary>
 public DenseLayerVisualizer(SpatialLayer spatialLayer, TreeLayer combinationLayer, DenseLayer denseLayer, string[] actionIndex)
 {
     _spatialLayer = spatialLayer;
     _combinationLayer = combinationLayer;
     _denseLayer = denseLayer;
     _actionIndex = actionIndex;

     // Prefab instances for the dense-layer and output visualizations.
     _spatialVisuals = GameObject.Instantiate(Resources.Load <GameObject>("DenseLayerVisualizer"));
     _outputVisuals = GameObject.Instantiate(Resources.Load <GameObject>("OutputLayerVisualizer"));
 }
コード例 #14
0
        public void EvaluateTest()
        {
            // A 6-input, 3-output dense layer with identity activation and
            // distance loss; smoke test that Evaluate runs without throwing.
            var sut = new DenseLayer(6, 3, new IdentityActivation(), new Distance());
            sut.Initialize();

            var randomInput = NNArray.Random(6);
            sut.Evaluate(randomInput);
        }
コード例 #15
0
        public void DenseLayer_GradientCheck_BatchSize_10()
        {
            // Gradient check for a dense layer with batch size 10 and a fixed seed.
            const int fanIn       = 5;
            const int batchSize   = 10;
            const int neuronCount = 3;

            var sut = new DenseLayer(neuronCount, Activation.Undefined);

            // CheckLayer drives the layer with (fanIn, 1, 1) inputs; 1e-4 is the
            // perturbation/tolerance epsilon passed to the checker.
            GradientCheckTools.CheckLayer(sut, fanIn, 1, 1, batchSize, 1e-4f, new Random(21));
        }
コード例 #16
0
        /// <summary>
        /// Creates a copy of this layer sharing its configuration, with weights
        /// and biases produced by their respective Variant(min, max) calls.
        /// </summary>
        public override LayerBase Variant(float min, float max, INeuralNetwork neuralNetwork)
        {
            return new DenseLayer
            {
                activationType = activationType,
                inputShape     = inputShape,
                outputShape    = outputShape,
                layerIndex     = layerIndex,
                neuralNetwork  = neuralNetwork,
                _weights       = _weights.Variant(min, max),
                _bias          = _bias.Variant(min, max),
            };
        }
コード例 #17
0
        /// <summary>
        /// Creates a deep copy of this layer (weights and biases cloned),
        /// attached to the given network.
        /// </summary>
        public override LayerBase Clone(INeuralNetwork neuralNetwork)
        {
            return new DenseLayer
            {
                activationType = activationType,
                inputShape     = inputShape,
                outputShape    = outputShape,
                layerIndex     = layerIndex,
                neuralNetwork  = neuralNetwork,
                _weights       = _weights.Clone(),
                _bias          = _bias.Clone(),
            };
        }
コード例 #18
0
    /// <summary>
    /// Builds the self-training network: 10x4 input, two sigmoid hidden layers
    /// (100 and 20 units), and a 9-unit linear output layer. The output layer
    /// is the handle to the whole chain.
    /// </summary>
    void SelfTrainNetwork()
    {
        var inputlayer   = new DenseLayer(10, 4, Activation.Sigmoid());
        var hiddenlayer1 = new DenseLayer(inputlayer, Activation.Sigmoid(), 100, LayerType.Hidden);
        var hiddenlayer2 = new DenseLayer(hiddenlayer1, Activation.Sigmoid(), 20, LayerType.Hidden);
        var outputlayer  = new DenseLayer(hiddenlayer2, Activation.Linear(), 9, LayerType.Output);

        Network = outputlayer;

        Network.Initilize(); // (sic) project API spelling

        // FIX: _NetSelfTrain was redundantly assigned true both before building
        // the network and after; setting it once (after setup) is sufficient.
        _NetSelfTrain = true;
    }
コード例 #19
0
 /// <summary>
 /// Plain gradient-descent update: subtracts learningRate * gradient from each
 /// layer's weights and biases, in place.
 /// </summary>
 /// <param name="layers">The dense layers whose parameters are updated.</param>
 public void UpdateParams(DenseLayer[] layers)
 {
     for (int i = 0; i < layers.Length; i++)
     {
         DenseLayer     layer = layers[i];
         Matrix <float> W     = layer.Weights.Vals;
         Matrix <float> b     = layer.Bias.Vals;
         Matrix <float> gW    = layer.GradWeights.Vals;
         Matrix <float> gb    = layer.GradBias.Vals;
         // NOTE: the scaling writes back into the layer's own gradient matrices
         // (result target is the same matrix), so gradients are consumed here.
         gW.Multiply(learningRate, gW);
         gb.Multiply(learningRate, gb);
         W.Subtract(gW, W);
         b.Subtract(gb, b);
     }
 }
コード例 #20
0
        /// <summary>
        /// Serializes the weights of every layer in the model under
        /// <paramref name="weightsDirectory"/> — one folder per convolutional or
        /// dense layer, one JSON file per filter/unit — writing layers in parallel.
        /// </summary>
        /// <param name="weightsDirectory">Root directory that receives the per-layer folders.</param>
        public void WriteWeightsToDirectory(string weightsDirectory)
        {
            Task[] tasks = new Task[model.NetworkLayers.Count];

            for (int i = 0; i < model.NetworkLayers.Count; i++)
            {
                // Copy the loop variable so each task's closure captures its own index.
                int taski = i;

                tasks[taski] = Task.Run(() =>
                {
                    // FIX: Path.Combine instead of "\\" concatenation keeps paths portable.
                    string layerPath = Path.Combine(weightsDirectory, model.NetworkLayers[taski].Type + taski);

                    switch (model.NetworkLayers[taski].Type)
                    {
                    case "Convolutional":
                        Directory.CreateDirectory(layerPath);
                        ConvolutionalLayer auxLayer = (ConvolutionalLayer)model.NetworkLayers[taski];

                        // The write happens synchronously inside the loop, so the
                        // loop variable can be used directly (no deferred closure).
                        for (int filter = 0; filter < auxLayer.FilterNumber; filter++)
                        {
                            File.WriteAllText(
                                Path.Combine(layerPath, "Filter" + filter + ".json"),
                                JsonConvert.SerializeObject(auxLayer.Filters[filter]));
                        }
                        break;

                    case "Dense":
                        Directory.CreateDirectory(layerPath);
                        DenseLayer auxDense = (DenseLayer)model.NetworkLayers[taski];
                        for (int unit = 0; unit < auxDense.NumberOfUnits; unit++)
                        {
                            File.WriteAllText(
                                Path.Combine(layerPath, "Unit" + unit + ".json"),
                                JsonConvert.SerializeObject(auxDense.Units[unit]));
                        }
                        break;

                    default:
                        // Other layer types carry no serializable weights.
                        break;
                    }
                });
            }

            Task.WaitAll(tasks);

            Console.WriteLine("Weights written to file");
        }
コード例 #21
0
        static void FromImageBatches()
        {
            // Load image tensors from the Test/ folder and push each one through
            // a small two-layer dense network, printing the final activations.
            List <Tensor> inputs = TensorConverter.InputsFromImage("Test/");

            Layer hidden = new DenseLayer(28 * 28, 64, ActivationType.Softmax);
            Layer output = new DenseLayer(64, 10, ActivationType.ReLU);

            foreach (var image in inputs)
            {
                var activations = output.Forward(hidden.Forward(image));
                Console.WriteLine(activations);
            }

            // Keep the console window open until the user presses Enter.
            Console.ReadLine();
        }
コード例 #22
0
        /// <summary>
        /// Loads previously serialized weights for every layer in the model from
        /// <paramref name="weightsDirectory"/>, one task per layer in parallel.
        /// Mirrors the layout produced by WriteWeightsToDirectory.
        /// </summary>
        /// <param name="weightsDirectory">Root directory containing the per-layer folders.</param>
        public void ReadWeightsFromDirectory(string weightsDirectory)
        {
            Task[] tasks = new Task[model.NetworkLayers.Count];

            for (int i = 0; i < model.NetworkLayers.Count; i++)
            {
                // Copy the loop variable so each task's closure captures its own index.
                int taski = i;

                tasks[taski] = Task.Run(() =>
                {
                    // FIX: Path.Combine instead of "\\" concatenation keeps paths portable.
                    string layerPath = Path.Combine(weightsDirectory, model.NetworkLayers[taski].Type + taski);

                    switch (model.NetworkLayers[taski].Type)
                    {
                    case "Convolutional":
                        ConvolutionalLayer auxLayer = (ConvolutionalLayer)model.NetworkLayers[taski];
                        for (int filter = 0; filter < auxLayer.FilterNumber; filter++)
                        {
                            string json = File.ReadAllText(Path.Combine(layerPath, "Filter" + filter + ".json"));
                            auxLayer.Filters[filter] = JsonConvert.DeserializeObject <Filter>(json);
                        }
                        break;

                    case "Dense":
                        DenseLayer auxDense = (DenseLayer)model.NetworkLayers[taski];
                        for (int unit = 0; unit < auxDense.NumberOfUnits; unit++)
                        {
                            string json = File.ReadAllText(Path.Combine(layerPath, "Unit" + unit + ".json"));
                            auxDense.Units[unit] = JsonConvert.DeserializeObject <Unit>(json);
                        }
                        break;

                    default:
                        // Other layer types carry no serialized weights.
                        break;
                    }
                });
            }

            Task.WaitAll(tasks);
        }
コード例 #23
0
    /// <summary>
    /// Coroutine that builds a dense network (10x4 -> 30 -> 500 -> 2) and trains
    /// it on generated data until the epoch error drops to 10 or training is
    /// cancelled, yielding once per epoch so the UI stays responsive.
    /// </summary>
    IEnumerator TrainNetwork()
    {
        var inputlayer   = new DenseLayer(10, 4, Activation.ReLU());
        var hiddenlayer1 = new DenseLayer(inputlayer, Activation.ReLU(), 30, LayerType.Hidden);
        var hiddenlayer2 = new DenseLayer(hiddenlayer1, Activation.ReLU(), 500, LayerType.Hidden);
        // FIX: the output layer was previously fed from hiddenlayer1, leaving
        // the 500-unit hiddenlayer2 constructed but never connected to the chain.
        var outputlayer  = new DenseLayer(hiddenlayer2, Activation.TangesHyperbolic(), 2, LayerType.Output);

        Network = outputlayer;

        outputlayer.Initilize();

        var trainingdata = GenerateTrainingData();

        int epoch      = 0;
        int epochsize  = trainingdata.GetLength(0);
        var epocherror = float.MaxValue;

        yield return(0);

        while (epocherror > 10 && _Training)
        {
            // Yield control back to Unity between epochs.
            yield return(0);

            epocherror = 0f;
            epoch++;
            for (var t = 0; t < epochsize; t++)
            {
                var truth = trainingdata[t, 1];
                var input = trainingdata[t, 0];

                var output = (Tensor1D)outputlayer.Forward(input);
                var dif    = output - truth;
                var sq     = dif * dif;
                // NOTE(review): sq is already the element-wise squared error;
                // squaring its sum again is unusual — confirm the intended metric.
                epocherror += (float)Math.Pow(sq.ElementSum(), 2);
                outputlayer.Backward(dif);
            }
            ErrorText.text = epocherror.ToString();
        }
        ErrorText.text = ("Finished!");
        _Training      = false;
    }
コード例 #24
0
        public void DenseLayer_Forward()
        {
            // Forward pass of a dense layer with a fixed RNG seed; the expected
            // values below are regression-pinned to seed 232, so the exact order
            // of RNG consumption in this test must not change.
            const int fanIn       = 5;
            const int batchSize   = 2;
            const int neuronCount = 3;
            var       random      = new Random(232);

            var sut = new DenseLayer(neuronCount, Activation.Undefined);

            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next());

            var actual = sut.Forward(input);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            // Expected activations (column-major), recorded from a known-good run.
            var expected = Matrix <float> .Build.Dense(batchSize, neuronCount, new float[] { 0.9898463f, 0.4394523f, 0.4259368f, -1.051275f, -0.5012454f, 0.08094172f });

            MatrixAsserts.AreEqual(expected, actual);
        }
コード例 #25
0
        /// <summary>
        /// Trains a single 1-in/1-out dense layer to fit y = ax + b on three
        /// samples, then reads back the learned bias and coefficient.
        /// </summary>
        public void LinearRegressionTest()
        {
            // y = ax + b
            double a = 1, b = -2;
            int    count = 20;

            // NOTE(review): the fill loop starts at 1, so input[0] and
            // expectedOutput[0] stay 0 — confirm sample 0 was meant to be skipped.
            double[] input          = new double[count];
            double[] expectedOutput = new double[count];
            for (int i = 1; i < count; i++)
            {
                input[i]          = i;
                expectedOutput[i] = a * i + b;
            }

            var layer = new DenseLayer(1, 1, new IdentityActivation(), new Distance());

            layer.Initialize();
            // Deterministic starting parameters.
            layer.Biases[0]     = 0;
            layer.Weights[0, 0] = 2;

            int    epoc  = 0;
            double error = 100;

            // Only samples 1..3 are used; stop on convergence or after 10000 epochs.
            while (++epoc < 10000 && error > 0.01)
            {
                error = layer.Train(new double[] { input[1] }, new double[] { expectedOutput[1] }, 0.01);
                error = layer.Train(new double[] { input[2] }, new double[] { expectedOutput[2] }, 0.01);
                error = layer.Train(new double[] { input[3] }, new double[] { expectedOutput[3] }, 0.01);
            }

            // FIX: removed an empty 20x19 nested loop that performed no work.

            // Learned parameters, kept for debugger inspection (the test asserts nothing).
            double bias = layer.Biases[0];
            double coef = layer.Weights[0, 0];
        }
コード例 #26
0
        /// <summary>
        /// Constructs two new networks using a fixed mixing ratio between two parents.
        /// </summary>
        /// <param name="parent1">The first parent network.</param>
        /// <param name="parent2">The second parent network.</param>
        /// <param name="mixingRatio">The probability with which the first(second) child will have genes from the first(second) parent.</param>
        /// <param name="ch1">When this method returns contains the first child network.</param>
        /// <param name="ch2">When this method returns contains the second child network.</param>
        private void UniformCrossover(NeuralNetwork parent1, NeuralNetwork parent2, float mixingRatio, out NeuralNetwork ch1, out NeuralNetwork ch2)
        {
            // Assumes both parents share the same topology (layer count and sizes)
            // — parent2's layers are indexed using parent1's bounds; TODO confirm.
            DenseLayer[] parent1Layers = parent1.GetLayers();
            DenseLayer[] parent2Layers = parent2.GetLayers();

            var child1Layers = new DenseLayer[parent1Layers.Length];
            var child2Layers = new DenseLayer[parent1Layers.Length];

            for (int k = 0; k < child1Layers.Length; k++)
            {
                DenseLayer parent1Layer = parent1Layers[k];
                DenseLayer parent2Layer = parent2Layers[k];

                // +1 row — presumably the bias weights; matches the layer indexer range.
                var child1Weights = new double[parent1Layer.InputNeuronsCount + 1, parent1Layer.OutputNeuronsCount];
                var child2Weights = new double[parent1Layer.InputNeuronsCount + 1, parent1Layer.OutputNeuronsCount];

                for (int i = 0; i < child1Weights.GetLength(0); i++)
                {
                    for (int j = 0; j < child1Weights.GetLength(1); j++)
                    {
                        // Per gene: with probability mixingRatio child1 inherits from
                        // parent1 and child2 from parent2; otherwise they swap.
                        if (random.NextDouble() < mixingRatio)
                        {
                            child1Weights[i, j] = parent1Layer[i, j];
                            child2Weights[i, j] = parent2Layer[i, j];
                        }
                        else
                        {
                            child1Weights[i, j] = parent2Layer[i, j];
                            child2Weights[i, j] = parent1Layer[i, j];
                        }
                    }
                }

                // Both children keep the first parent's activation function.
                child1Layers[k] = new DenseLayer(child1Weights, parent1Layer.GetActivationFunction());
                child2Layers[k] = new DenseLayer(child2Weights, parent1Layer.GetActivationFunction());
            }

            ch1 = new NeuralNetwork(child1Layers);
            ch2 = new NeuralNetwork(child2Layers);
        }
コード例 #27
0
        /// <summary>
        /// Builds the demo network shown by this view model: an identity input
        /// layer (5 inputs), a 5->4 dense hidden layer, and a 4->2 dense output
        /// layer, plus a random input vector and a one-hot expected output.
        /// </summary>
        public NetworkViewModel()
        {
            ILayer layer = new DenseLayer(5, 4, new IdentityActivation(), new CrossEntropy());

            // The Network is constructed with the input and output layers; the
            // hidden dense layer is then inserted via AddLayer.
            this.Network = new Network(new IdentityLayer(5), new DenseLayer(4, 2, new IdentityActivation(), new CrossEntropy()));
            this.Network.AddLayer(layer);

            this.Layer1 = layer;
            this.Layer2 = this.Network.OutputLayer;



            // Random input sized to the network's input width; one-hot target over the outputs.
            this.Input          = (new double[this.Network.InputLayer.NbInput]).Select(x => rnd.NextDouble()).ToArray();
            this.ExpectedOutput = Utils.OneHot(this.Layer2.NbOutput, rnd.Next(0, this.Layer2.NbOutput - 1));

            this.errors = new double[this.Layer2.NbOutput];

            // Run an initial forward pass so the view has values to display.
            this.Calculate();

            this.targetError = 0.01;
            this.learnRate   = 0.01;
        }
コード例 #28
0
 /// <summary>
 /// Builds a two-branch network: a convolution/mean-pool pipeline over the
 /// spatial (matrix) input and a sigmoid dense branch over the vector input,
 /// merged by a TreeLayer and read out by an identity dense layer with one
 /// unit per label.
 /// </summary>
 /// <param name="matsize">Side length of the square spatial input.</param>
 /// <param name="vecsize">Length of the auxiliary vector input.</param>
 /// <param name="depth">Channel count of the spatial input.</param>
 /// <param name="labels">Number of output classes.</param>
 /// <param name="args">Per-stage convolution/pooling settings; must contain at least one entry.</param>
 public ConvolutionalNetwork(int matsize, int vecsize, int depth, int labels, params CNNArgs[] args)
 {
     _matsize               = matsize;
     _vecsize               = vecsize;
     _depth                 = depth;
     _labels                = labels;
     _args                  = args;
     InputLayer             = new SpatialLayer(matsize, depth);
     ConvolutionalLayers    = new ConvolutionalLayer[args.Length];
     SubSampleLayers        = new MeanPoolLayer[args.Length];
     // The first conv/pool stage reads from the spatial input layer...
     ConvolutionalLayers[0] = new ConvolutionalLayer(args[0].FilterSize, args[0].FilterCount, args[0].Stride, InputLayer, Functions.Rectifier2D);
     SubSampleLayers[0]     = new MeanPoolLayer(args[0].PoolLayerSize, ConvolutionalLayers[0]);
     // ...and each subsequent stage chains onto the previous pooling layer.
     for (int i = 1; i < args.Length; i++)
     {
         ConvolutionalLayers[i] = new ConvolutionalLayer(args[i].FilterSize, args[i].FilterCount, args[i].Stride, SubSampleLayers[i - 1], Functions.Rectifier2D);
         SubSampleLayers[i]     = new MeanPoolLayer(args[i].PoolLayerSize, ConvolutionalLayers[i]);
     }
     FlattenLayer      = new FlattenLayer(SubSampleLayers[SubSampleLayers.Length - 1]);
     VectorInput       = new InputLayer(vecsize);
     LinearHiddenLayer = new DenseLayer(vecsize, VectorInput, Functions.Sigmoid);
     CombinationLayer  = new TreeLayer(FlattenLayer.Size(), vecsize);
     OutputLayer       = new DenseLayer(labels, CombinationLayer, Functions.Identity);
 }
コード例 #29
0
        /// <summary>
        /// Prompts the user for a link type via a dialog and inserts the chosen
        /// layer into the chain (input layers always at index 0, everything else
        /// appended), then re-validates compatibility and refreshes the display.
        /// </summary>
        public void AddChainLink()
        {
            var windowManager = new WindowManager();
            var context       = new AddLinkWindowViewModel();

            windowManager.ShowDialog(context);

            // No result means the dialog was cancelled.
            if (context.Result.HasValue)
            {
                int      insertIndex = ChainLinks.Count;
                LinkBase link;
                // FIX: replaced String.Format("literal") calls (no format items,
                // pointless allocation) with plain string literals throughout.
                switch (context.Result.Value)
                {
                case LinkType.InputLayer:
                    // At most one input layer is allowed per chain.
                    if (ChainLinks.Count > 0)
                    {
                        if (ChainData.CountLinksOfType(typeof(InputLayer)) > 0)
                        {
                            MessageBox.Show("Only one Input Layer is allowed (or useful) per chain.");
                            return;
                        }
                    }
                    insertIndex = 0;
                    link        = new InputLayer(ChainData, "Input Layer");
                    //TODO: Fix
                    ((InputDataParameter)link.Parameters[0]).InputDataValue = _parent.NetworkArchitectureData.Problem.Inputs[0];
                    break;

                case LinkType.ActivationLayer:
                    link = new ActivationLayer(ChainData, "Activation Layer");
                    break;

                case LinkType.Convolution1DLayer:
                    link = new Convolution1DLayer(ChainData, "1D Convolution Layer");
                    break;

                case LinkType.Convolution2DLayer:
                    link = new Convolution2DLayer(ChainData, "2D Convolution Layer");
                    break;

                case LinkType.Convolution3DLayer:
                    link = new Convolution3DLayer(ChainData, "3D Convolution Layer");
                    break;

                // Unrecognized link types fall back to a dense layer.
                default:
                case LinkType.DenseLayer:
                    link = new DenseLayer(ChainData, "Dense Layer");
                    break;

                case LinkType.DropoutLayer:
                    link = new DropoutLayer(ChainData, "Dropout Layer");
                    break;

                case LinkType.FlattenLayer:
                    link = new FlattenLayer(ChainData, "Flatten Layer");
                    break;

                case LinkType.ReshapeLayer:
                    link = new ReshapeLayer(ChainData, "Reshape Layer");
                    break;

                case LinkType.MergeLayer:
                    link = new MergeLayer(ChainData, "Merge Layer");
                    break;

                case LinkType.BatchNormalizationLayer:
                    link = new BatchNormalizationLayer(ChainData, "Batch Normalization Layer");
                    break;

                case LinkType.LinearTransformationLayer:
                    link = new LinearTransformationLayer(ChainData, "Linear Transformation");
                    break;
                }

                ChainData.ChainLinks.Insert(insertIndex, link);
                ValidateInputCompatibility();
                refreshLinks();
            }
        }
コード例 #30
0
        /// <summary>
        /// Moment-based parameter update: maintains per-layer first (m) and second
        /// (v) moment buffers of the gradients, derives corrected estimates
        /// (m_hat, v_hat), computes a step from them, and subtracts it from the
        /// weights and biases in place. The exact formulas live in the
        /// UpdateM/UpdateV/UpdateMHat/UpdateVHat/UpdateStep helpers (not visible
        /// here) — this appears to implement Adam; confirm against the helpers.
        /// </summary>
        /// <param name="layers">The dense layers whose parameters are updated.</param>
        public void UpdateParams(DenseLayer[] layers)
        {
            // Lazily allocate the moment/step buffers on the first call.
            if (first)
            {
                InitContainers(layers);
                first = false;
            }

            // Global timestep counter, presumably used for bias correction in the
            // *_hat helpers — confirm.
            t++;

            for (int i = 0; i < layers.Length; i++)
            {
                DenseLayer     layer  = layers[i];
                Matrix <float> W      = layer.Weights.Vals;
                Matrix <float> b      = layer.Bias.Vals;
                Matrix <float> WgOrig = layer.GradWeights.Vals;
                Matrix <float> bgOrig = layer.GradBias.Vals;

                Matrix <float> Wm     = Wms[i];
                Matrix <float> Wv     = Wvs[i];
                Matrix <float> Wm_hat = Wms_hat[i];
                Matrix <float> Wv_hat = Wvs_hat[i];
                Matrix <float> Wg     = Wgs[i];
                Matrix <float> Wstep  = Wsteps[i];
                Matrix <float> bm     = bms[i];
                Matrix <float> bv     = bvs[i];
                Matrix <float> bg     = bgs[i];
                Matrix <float> bm_hat = bms_hat[i];
                Matrix <float> bv_hat = bvs_hat[i];
                Matrix <float> bstep  = bsteps[i];

                // The gradient is re-copied into the scratch buffer before each
                // helper call — presumably the helpers mutate their second
                // argument, so a fresh copy is needed each time; confirm.
                WgOrig.CopyTo(Wg);
                UpdateM(Wm, Wg);

                WgOrig.CopyTo(Wg);
                UpdateV(Wv, Wg);

                bgOrig.CopyTo(bg);
                UpdateM(bm, bg);

                bgOrig.CopyTo(bg);
                UpdateV(bv, bg);


                // Corrected moment estimates for weights and biases.
                UpdateMHat(Wm_hat, Wm);

                UpdateVHat(Wv_hat, Wv);

                UpdateMHat(bm_hat, bm);

                UpdateVHat(bv_hat, bv);


                // Compute the update step from the corrected moments...
                UpdateStep(Wstep, Wm_hat, Wv_hat);

                UpdateStep(bstep, bm_hat, bv_hat);


                // ...and apply it to the parameters in place.
                W.Subtract(Wstep, W);

                b.Subtract(bstep, b);
            }
        }
        }