Example #1
0
            public void Given__ASigmoidNetworkOfSuitableSize__AndSomeTrainingData(int inputLayerSize, int hiddenLayerSize, int outputLayerSize, int iterations, double trainingRate, int trainingSamplesCount)
            {
                // Build a randomized 3-layer sigmoid net and wrap it with the
                // string->inputs / outputs->int interpretations under test.
                var underlyingNet  = new NeuralNet3LayerSigmoid(inputLayerSize, hiddenLayerSize, outputLayerSize).Randomize(2);
                var snapshotBefore = underlyingNet.ToString();
                var interpretedNet = new InterpretedNet <string, int>(
                    underlyingNet,
                    s => s.Select(c => 0.1d * (c - 73)).ToArray(),
                    o => (int)(o.Single() * 11),
                    i => new [] { (ZeroToOne)(1 / 11d * i) },
                    (x, y) => x.Zip(y, (xx, yy) => Math.Abs(xx - yy))
                    );
                var trainingSet = GenerateRandomDataAndLabels(trainingSamplesCount);
                var testSet     = GenerateRandomDataAndLabels(10 + trainingSamplesCount / 10);
                var hitsBefore  = CountHits(interpretedNet, testSet);

                // Train, then re-score against the same held-out test set.
                new BackPropagationWithGradientDescent().ApplyToBatches(interpretedNet, trainingSet, trainingSamplesCount / 10, trainingRate, iterations);
                var hitsAfter = CountHits(interpretedNet, testSet);

                // Diagnostics: score summary, a few sample predictions, net before/after.
                Console.Write("Hits for NN size {3}, {4}, {5} Before / After training with {0} samples : {1} / {2}",
                              trainingSamplesCount * iterations,
                              hitsBefore, hitsAfter,
                              inputLayerSize, hiddenLayerSize, outputLayerSize);
                GenerateRandomDataAndLabels(10).Each(t => Console.WriteLine("{0} \t(should be \t{1}) \t: \t{2} ", t.Data, t.Label, interpretedNet.OutputFor(t.Data)));
                Console.WriteLine(snapshotBefore);
                Console.WriteLine(underlyingNet);
                // Assert: training improved the hit count by at least 10%.
                hitsAfter.Data.ShouldBeGreaterThan(hitsBefore.Data * 1.1);
            }
 /// <summary>
 /// Trains by random hill-descent: for each labelled pair, repeatedly perturbs
 /// weights at random and keeps only perturbations that move the output closer
 /// to the target (see MutateNetByRandomFalls).
 /// </summary>
 /// <param name="net">The interpreted net to mutate in place.</param>
 /// <param name="trainingData">Labelled examples; re-enumerated once per iteration.</param>
 /// <param name="trainingRateEta">Probability/scale used by the random mutation step.</param>
 /// <param name="iterations">Number of full passes over <paramref name="trainingData"/>.</param>
 /// <returns>The same (mutated) <paramref name="net"/> instance.</returns>
 public override InterpretedNet <TData, TLabel> Apply <TData, TLabel>(InterpretedNet <TData, TLabel> net, IEnumerable <V1.Pair <TData, TLabel> > trainingData, double trainingRateEta, int iterations = 1)
 {
     // Fix: honour the iterations parameter. Previously exactly one pass was made
     // regardless of its value (the sibling back-propagation Apply does loop).
     // Behaviour is unchanged for the default iterations == 1.
     for (int iteration = 0; iteration < iterations; iteration++)
     {
         foreach (var pair in trainingData)
         {
             MutateNetByRandomFalls(net, trainingRateEta, pair);
         }
     }
     return(net);
 }
 /// <summary>
 /// Adapter: translates a labelled example into the raw numeric inputs and
 /// desired outputs for the underlying net, then delegates to the numeric
 /// DeltasFor overload.
 /// </summary>
 public static DeltasFor2LayersOfNet DeltasFor <TData, TLabel>(InterpretedNet <TData, TLabel> net, Pair <TData, TLabel> target)
 {
     var encodedInputs  = net.InputEncoding(target.Data).Select(i => (double)i);
     var desiredOutputs = net.ReverseInterpretation(target.Label);
     return DeltasFor(net.Net, encodedInputs, desiredOutputs);
 }
        /// <summary>
        /// Random-descent step for one labelled example: perturbs weights/biases at
        /// random, keeps a perturbation only if it moves the output closer to the
        /// target, otherwise reverts it. Returns the number of kept improvements ("falls").
        /// </summary>
        /// <param name="net">The interpreted net, mutated in place.</param>
        /// <param name="trainingRateEta">Per-weight probability of perturbation, and the scale passed to the Delta* calls.</param>
        /// <param name="pair">The labelled example to improve against.</param>
        int MutateNetByRandomFalls <TData, TLabel>(InterpretedNet <TData, TLabel> net, double trainingRateEta, V1.Pair <TData, TLabel> pair)
        {
            var falls     = 0;
            var bestSoFar = net.Net.OutputFor(net.InputEncoding(pair.Data));

            // Only bother mutating if the net currently mislabels this example.
            if (!net.OutputInterpretation(bestSoFar).Equals(pair.Label))
            {
                for (int e = 0; e < Iterations; e++)
                {
                    // Randomize a subset of input->hidden weights (copy of the current matrix).
                    var deltaInputToHidden = net.Net.InputToHidden.Copy();
                    for (int i = 0; i < deltaInputToHidden.RowCount; i++)
                    {
                        for (int j = 0; j < deltaInputToHidden.ColumnCount; j++)
                        {
                            if (rnd.NextDouble() < trainingRateEta)
                            {
                                deltaInputToHidden[i, j] = Randomize(deltaInputToHidden[i, j]);
                            }
                        }
                    }
                    // Likewise for hidden->output weights.
                    var deltaHiddenToOutput = net.Net.HiddenToOutput.Copy();
                    for (int i = 0; i < deltaHiddenToOutput.RowCount; i++)
                    {
                        for (int j = 0; j < deltaHiddenToOutput.ColumnCount; j++)
                        {
                            if (rnd.NextDouble() < trainingRateEta)
                            {
                                deltaHiddenToOutput[i, j] = Randomize(deltaHiddenToOutput[i, j]);
                            }
                        }
                    }
                    // Fix: materialize the bias deltas. As deferred Selects over Randomize,
                    // applying and reverting them enumerated twice and so used two DIFFERENT
                    // sets of random values, making the "revert" below not actually revert.
                    var deltaHiddenBiases = net.Net.HiddenLayer.Select(n => Randomize(n.Bias)).ToArray();
                    var deltaOutputBiases = net.Net.OutputLayer.Select(n => Randomize(n.Bias)).ToArray();
                    //
                    net.Net.DeltaInputToHiddenWeights(deltaInputToHidden, trainingRateEta);
                    net.Net.DeltaHiddenToOutputWeights(deltaHiddenToOutput, trainingRateEta);
                    net.Net.DeltaBiases(deltaHiddenBiases, deltaOutputBiases, trainingRateEta);

                    var newResult = net.Net.OutputFor(net.InputEncoding(pair.Data));

                    if (net.CloserOutputToTarget(pair.Label, newResult, bestSoFar).Equals(bestSoFar))
                    {
                        // Not an improvement, so revert by applying every delta NEGATED.
                        // Fix: the previous code negated only the input->hidden delta and
                        // re-applied the hidden->output and bias deltas positively,
                        // doubling them instead of undoing them.
                        // NOTE(review): assumes bias deltas are plain doubles — confirm.
                        net.Net.DeltaInputToHiddenWeights(-deltaInputToHidden, trainingRateEta);
                        net.Net.DeltaHiddenToOutputWeights(-deltaHiddenToOutput, trainingRateEta);
                        net.Net.DeltaBiases(deltaHiddenBiases.Select(b => -b), deltaOutputBiases.Select(b => -b), trainingRateEta);
                    }
                    else
                    {
                        // Fix: record the kept improvement as the new benchmark; previously
                        // later perturbations were still compared against the stale output.
                        bestSoFar = newResult;
                        falls++;
                    }
                }
            }
            return(falls);
        }
        /// <summary>
        /// Mini-batch training: shuffles the training data once, then applies
        /// <see cref="Apply"/> to consecutive batches with a learning rate that
        /// decays as trainingRateStart / (1 + i).
        /// </summary>
        /// <param name="net">The interpreted net to train in place.</param>
        /// <param name="trainingData">The labelled training data.</param>
        /// <param name="batchSize">Number of samples per batch.</param>
        /// <param name="trainingRateStart">Initial learning rate; decayed each iteration.</param>
        /// <param name="iterations">Number of batches; batches beyond the data length are empty no-ops.</param>
        /// <returns>The same (trained) <paramref name="net"/> instance.</returns>
        public virtual InterpretedNet <TData, TLabel> ApplyToBatches <TData, TLabel>(InterpretedNet <TData, TLabel> net, V1.Pair <TData, TLabel>[] trainingData, int batchSize, double trainingRateStart, int iterations = 1)
        {
            // Fix: pin the shuffle by materializing it. If OrderRandomly() returns a
            // deferred sequence, every Skip/Take below re-enumerates (and re-shuffles)
            // it, so batches could repeat some samples and never see others.
            var trainingDataInRandomOrder = trainingData.OrderRandomly().ToArray();

            for (int i = 0; i < iterations; i++)
            {
                var batch = trainingDataInRandomOrder.Skip(i * batchSize).Take(batchSize);
                Apply(net, batch, trainingRateStart / (1 + i));
            }
            return(net);
        }
 // NOTE(review): an earlier TODO here claimed the training rate was ignored,
 // but trainingRateEta is passed to every Delta* call below — confirm that the
 // note was stale before reinstating it.
 /// <summary>
 /// Online gradient descent: for each pass, back-propagates every example and
 /// nudges the net's biases and weights by the computed deltas scaled by eta.
 /// </summary>
 public override InterpretedNet <TData, TLabel> Apply <TData, TLabel>(InterpretedNet <TData, TLabel> net, IEnumerable <Pair <TData, TLabel> > trainingData, double trainingRateEta, int iterations = 1)
 {
     for (int pass = 0; pass < iterations; pass++)
     {
         foreach (var example in trainingData)
         {
             var deltas = DeltasFor(net, example);
             net.Net.DeltaBiases(deltas.HiddenBiases, deltas.OutputBiases, trainingRateEta);
             net.Net.DeltaHiddenToOutputWeights(deltas.OutputWeights, trainingRateEta);
             net.Net.DeltaInputToHiddenWeights(deltas.HiddenWeights, trainingRateEta);
         }
     }
     return net;
 }
 // Score: how many labelled test images the net classifies correctly.
 int HitsScoredOnTestData(InterpretedNet <Image, byte> net, IEnumerable <LearningNeuralNetworks.V1.Pair <Image, byte> > testData)
 {
     var hits = 0;
     foreach (var example in testData)
     {
         if (net.OutputFor(example.Data) == example.Label)
         {
             hits++;
         }
     }
     return hits;
 }
Example #8
0
 // Score the net on the test set, returned as a (correct, total) pair.
 Pair <int, int> CountHits(InterpretedNet <string, int> guineaPig, Pair <string, int>[] testData)
 {
     var correct = 0;
     foreach (var sample in testData)
     {
         if (guineaPig.OutputFor(sample.Data) == sample.Label)
         {
             correct++;
         }
     }
     return new Pair <int, int>(correct, testData.Length);
 }
 /// <summary>
 /// Base training algorithm: a deliberate no-op that returns the net unchanged.
 /// Override in a concrete algorithm (e.g. back-propagation) to actually train.
 /// </summary>
 /// <param name="net">The neural net, wrapped with interpretations of Data to Input weights, output weights to Label, and Label to output weights</param>
 /// <param name="trainingData">The labelled training data</param>
 /// <param name="trainingRateEta">expected to be between 0 and 1 for most algorithms, but algorithms may interpret it as they see fit.</param>
 /// <param name="iterations">Intended number of passes over <paramref name="trainingData"/>; ignored by this base implementation.</param>
 /// <returns>The same <paramref name="net"/> instance, unmodified by this base implementation.</returns>
 public virtual InterpretedNet <TData, TLabel> Apply <TData, TLabel>(InterpretedNet <TData, TLabel> net, IEnumerable <V1.Pair <TData, TLabel> > trainingData, double trainingRateEta, int iterations = 1)
 {
     return(net);
 }