Example #1
 /// <summary>
 /// Performs one top-down learning step.
 /// </summary>
 protected override void topDownAdaptation()
 {
     if (!skipTopDown)
     {
         // Flatten both 2-D signals and run one supervised step in the
         // top-down direction (the output signal drives the input signal).
         double[] lInput  = ArrayHelper.linearize(input.reality);
         double[] lOutput = ArrayHelper.linearize(output.reality);
         topDownTeacher.Run(lOutput, lInput);
     }
     }
 }
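This snippet depends on project-specific helpers (ArrayHelper.linearize, input.reality, topDownTeacher). Below is a minimal sketch of what a flattening helper like ArrayHelper.linearize presumably does, assuming reality is a double[,] activation map; the helper name and layout are assumptions from this example, not a known API.

 public static class ArrayHelper
 {
     // Flatten a 2-D matrix into the 1-D vector a supervised teacher expects.
     public static double[] linearize(double[,] matrix)
     {
         var rows = matrix.GetLength(0);
         var cols = matrix.GetLength(1);
         var result = new double[rows * cols];
         for (var i = 0; i < rows; i++)
             for (var j = 0; j < cols; j++)
                 result[i * cols + j] = matrix[i, j];
         return result;
     }
 }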
Example #2
        /// <summary>
        /// Implementation of the divergence operation.
        /// </summary>
        protected override void diverge()
        {
            // Concatenate all the modalities into a single vector.
            double[] realSignal;
            double[] predictedSignal;
            getConcatenatedModalities(out realSignal, out predictedSignal);

            // Generate the prediction from the concatenated real signal.
            double[] output = network.Compute(realSignal);

            // Distribute the prediction back over the modality signals.
            setConcatenatedModalities(null, output);

            // Proceed to learning: teach the network to reconstruct the real
            // signal from itself (an autoencoder-style step).
            if (!learningLocked)
            {
                teacher.Run(realSignal, realSignal);
            }
        }
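The concatenation helpers here are project-specific. The following is a minimal sketch of the concatenate/distribute pattern the method relies on, assuming each modality exposes a plain double[] signal; the type and method names are illustrative, not the project's actual API.

 using System;
 using System.Collections.Generic;
 using System.Linq;

 public static class ModalityVectors
 {
     // Pack every modality's signal into one flat vector for the network.
     public static double[] Concatenate(IEnumerable<double[]> modalities)
     {
         return modalities.SelectMany(signal => signal).ToArray();
     }

     // Split a flat prediction vector back into per-modality buffers.
     public static void Distribute(double[] concatenated, IList<double[]> targets)
     {
         var offset = 0;
         foreach (var target in targets)
         {
             Array.Copy(concatenated, offset, target, 0, target.Length);
             offset += target.Length;
         }
     }
 }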
Example #3
        private static void NeuralNetworkLearningSingleAttributes(LearningData learningData)
        {
            var stopWatch = new Stopwatch();

            stopWatch.Start();

            var testMatcher     = new LoggingNeuralNetworkMatcher(learningData.TestData);
            var trainingMatcher = new LoggingNeuralNetworkMatcher(learningData.TrainingData);

            Parallel.ForEach(learningData.ActualMetadata.Keys, metadataKey =>
            {
                var metadata = new Dictionary <string, IndexableAttributeMetadata> {
                    { metadataKey, learningData.ActualMetadata[metadataKey] }
                };
                var trainingInputs  = learningData.TrainingData.Select(data => data.ToVectorArray(metadata)).ToArray();
                var trainingOutputs = learningData.TrainingData.Select(data => new[] { data.PercentMatch }).ToArray();
                var testInputs      = learningData.TestData.Select(data => data.ToVectorArray(metadata)).ToArray();
                var testOutputs     = learningData.TestData.Select(data => new[] { data.PercentMatch }).ToArray();

                if (testInputs.Length != testOutputs.Length || trainingInputs.Length != trainingOutputs.Length)
                {
                    throw new ArgumentException("Inputs and outputs data are not the same size");
                }
                var vectorSize = trainingInputs.First().Length;
                if (trainingInputs.Any(input => input.Length != vectorSize))
                {
                    throw new ArgumentException("Not all trainingInputs have the same vector size");
                }
                if (testInputs.Any(input => input.Length != vectorSize))
                {
                    throw new ArgumentException("Not test inputs have the correct vector size");
                }

                var results = new List <Tuple <int[], double, double> >();

                Parallel.For(0, 16, i =>
                {
                    var parameters = new[] { i, 1 };

                    // One hidden layer of i neurons feeding a single output neuron.
                    var network = new ActivationNetwork(new BipolarSigmoidFunction(), trainingInputs[0].Length, parameters);
                    var teacher = new ParallelResilientBackpropagationLearning(network);
                    var random  = new Random();

                    var error     = double.MaxValue;
                    var iteration = 0;
                    while (error > 0.0005 && iteration < 200)
                    {
                        iteration++;
                        // Train on one randomly chosen sample per iteration
                        // (Random.Next's upper bound is exclusive, so use Length).
                        var pair = random.Next(trainingInputs.Length);
                        teacher.Run(trainingInputs[pair], trainingOutputs[pair]);

                        // Stop on combined accuracy/recall/precision rather than the
                        // teacher's squared error (0 when all three reach 1).
                        var accuracyRecallPrecision = trainingMatcher.MatchCount(network, metadata, new List<string>());
                        error = 3 - accuracyRecallPrecision.Item1 - accuracyRecallPrecision.Item2 - accuracyRecallPrecision.Item3;

                        if (iteration % 100 == 0)
                        {
                            Logger.DebugFormat("NeuralNetwork: Iteration {0} Error {1}", iteration, error);
                        }
                    }

                    var inSampleError    = teacher.ComputeError(trainingInputs, trainingOutputs);
                    var outOfSampleError = teacher.ComputeError(testInputs, testOutputs);
                    lock (results)
                    {
                        results.Add(new Tuple <int[], double, double>(parameters, inSampleError, outOfSampleError));
                    }
                    testMatcher.LogMatchCount(string.Format("{0} ({1})", metadataKey, learningData.ActualMetadata[metadataKey].Attribute.GetType().FullName), network,
                                              metadata, new List <string>());
                });

                Logger.InfoFormat("Results for {1} ({2}):\n{0}",
                                  string.Join(", ", results.Select(result => $"{string.Join("-", result.Item1)}: In: {result.Item2} Out: {result.Item3}")), metadataKey,
                                  learningData.ActualMetadata[metadataKey].Attribute.GetType().FullName);
            });

            stopWatch.Stop();
            Logger.InfoFormat("Neural Network learning (single attribute) took {0}", stopWatch.Elapsed);
        }
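Stripped of the per-attribute sweep and logging, the Accord.NET calls this method builds on reduce to a few lines. A minimal sketch with toy bipolar XOR data standing in for the vectorized attributes; depending on the library version, these types live in Accord.Neuro or AForge.Neuro.

 using Accord.Neuro;
 using Accord.Neuro.Learning;

 // Toy data in place of ToVectorArray output.
 double[][] inputs =
 {
     new[] { -1.0, -1.0 }, new[] { -1.0, 1.0 },
     new[] {  1.0, -1.0 }, new[] {  1.0, 1.0 }
 };
 double[][] outputs = { new[] { -1.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { -1.0 } };

 // Two inputs, one hidden layer of two neurons, one output -- the same
 // constructor shape as the sweep above.
 var network = new ActivationNetwork(new BipolarSigmoidFunction(), 2, 2, 1);
 var teacher = new ParallelResilientBackpropagationLearning(network);

 var error = double.MaxValue;
 for (var epoch = 0; epoch < 1000 && error > 0.0005; epoch++)
 {
     // RunEpoch returns the summed squared error over the whole set.
     error = teacher.RunEpoch(inputs, outputs);
 }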
Example #4
 /// <summary>
 /// Performs one bottom-up learning step.
 /// </summary>
 protected override void bottomUpAdaptation()
 {
     // Flatten both 2-D signals and run one supervised step in the
     // bottom-up direction (the input signal drives the output signal).
     double[] lInput  = ArrayHelper.linearize(input.reality);
     double[] lOutput = ArrayHelper.linearize(output.reality);
     bottomUpTeacher.Run(lInput, lOutput);
 }
Example #5
        public void Learn()
        {
            var stopWatch = new Stopwatch();

            stopWatch.Start();

            var trainingInputs  = LearningData.TrainingData.Select(data => data.ToVectorArray(Metadata, PropertiesToSkip)).ToArray();
            var trainingOutputs = LearningData.TrainingData.Select(data => new[] { data.PercentMatch }).ToArray();
            var testInputs      = LearningData.TestData.Select(data => data.ToVectorArray(Metadata, PropertiesToSkip)).ToArray();
            var testOutputs     = LearningData.TestData.Select(data => new[] { data.PercentMatch }).ToArray();

            if (testInputs.Length != testOutputs.Length || trainingInputs.Length != trainingOutputs.Length)
            {
                throw new ArgumentException("Inputs and outputs data are not the same size");
            }
            var vectorSize = trainingInputs.First().Length;

            if (trainingInputs.Any(input => input.Length != vectorSize))
            {
                throw new ArgumentException("Not all trainingInputs have the same vector size");
            }
            if (testInputs.Any(input => input.Length != vectorSize))
            {
                throw new ArgumentException("Not test inputs have the correct vector size");
            }

            var testMatcher     = new LoggingNeuralNetworkMatcher(LearningData.TestData);
            var trainingMatcher = new LoggingNeuralNetworkMatcher(LearningData.TrainingData);
            var results         = new List <Tuple <int[], double, double> >();

            Parallel.For(Range.Min, Range.Max + 1, i =>
            {
                // i > 0: one hidden layer of i neurons plus the output neuron;
                // otherwise just the single output neuron, no hidden layer.
                var parameters = i > 0 ? new[] { i, 1 } : new[] { 1 };

                var network = new ActivationNetwork(new BipolarSigmoidFunction(), trainingInputs[0].Length, parameters);
                var teacher = new ParallelResilientBackpropagationLearning(network);
                var random  = new Random();

                var error     = double.MaxValue;
                var iteration = 0;
                while (error > 0.0005 && iteration < 1000)
                {
                    iteration++;
                    // Train on one randomly chosen sample per iteration
                    // (Random.Next's upper bound is exclusive, so use Length).
                    var pair = random.Next(trainingInputs.Length);
                    teacher.Run(trainingInputs[pair], trainingOutputs[pair]);

                    // Stop on combined accuracy/recall/precision rather than the
                    // teacher's squared error (0 when all three reach 1).
                    var accuracyRecallPrecision = trainingMatcher.MatchCount(network, Metadata, PropertiesToSkip);
                    error = 3 - accuracyRecallPrecision.Item1 - accuracyRecallPrecision.Item2 - accuracyRecallPrecision.Item3;

                    if (iteration % 100 == 0)
                    {
                        Logger.DebugFormat("NeuralNetwork: Iteration {0} Error {1}", iteration, error);
                    }
                }

                var inSampleError    = teacher.ComputeError(trainingInputs, trainingOutputs);
                var outOfSampleError = teacher.ComputeError(testInputs, testOutputs);
                lock (results)
                {
                    results.Add(new Tuple <int[], double, double>(parameters, inSampleError, outOfSampleError));
                    if (error < BestError)
                    {
                        BestNetwork   = network;
                        BestParameter = i;
                        BestError     = error;
                    }
                }
                testMatcher.LogMatchCount(string.Format("{0}: {1}", Name, string.Join("-", parameters)), network, Metadata, PropertiesToSkip);
            });

            Logger.DebugFormat("Results ({0}):\n{1}", Name,
                               string.Join(", ", results.Select(result => $"{string.Join("-", result.Item1)}: In: {result.Item2} Out: {result.Item3}")));
            Logger.InfoFormat("Best {0}: {1}-1 Error {2}", Name, BestParameter, BestError);

            stopWatch.Stop();
            Logger.InfoFormat("Neural Network learning ({0}) took {1}", Name, stopWatch.Elapsed);
        }
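Once Learn() has populated BestNetwork, consuming it is a single Compute call. A minimal sketch of a method that would live in the same class; the Candidate type is hypothetical, and ToVectorArray, Metadata and PropertiesToSkip are project-specific assumptions carried over from the example.

        // Hypothetical usage after Learn() has run; 'Candidate' is illustrative.
        public double PredictPercentMatch(Candidate candidate)
        {
            double[] vector = candidate.ToVectorArray(Metadata, PropertiesToSkip);
            double[] prediction = BestNetwork.Compute(vector); // ActivationNetwork.Compute
            return prediction[0]; // single output mirrors the PercentMatch target above
        }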