Example #1
        public void TestDynamicXOR()
        {
            // Input values come from the OnInputFunc callback; ideal values are read
            // straight from the XOR ideal-output table.
            Func<int, int, double> inputFunc = OnInputFunc;
            Func<int, int, double> idealFunc = (chunk, index) => XOR.XORIdeal[chunk][index];
            var input = new FuncMLDataProvider(inputFunc, XOR.XORInput.Length, XOR.XORInput[0].Length);
            var ideal = new FuncMLDataProvider(idealFunc, XOR.XORIdeal.Length, XOR.XORIdeal[0].Length);

            var ds = new DynamicMLDataSet(input, ideal);

            // Three-layer feed-forward network sized from the dynamic data set; every layer
            // uses sigmoid activation and no bias neurons.
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, ds.InputSize));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, ds.InputSize + 1));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, ds.IdealSize));
            network.Structure.FinalizeStructure();
            network.Reset(42);

            // Train with resilient propagation (RPROP) until the error drops below the
            // target or the iteration cap is reached.
            var trainer = new Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation(network, ds);

            int maxIteration = 300;
            int iteration    = 0;

            do
            {
                trainer.Iteration();
                Debug.WriteLine(++iteration + ": Error = " + trainer.Error);
            } while (trainer.Error > 0.0001 && iteration < maxIteration);

            Assert.IsTrue(iteration < maxIteration);
        }
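
The test assumes an OnInputFunc callback and an XOR helper holding the truth-table data, neither of which appears in the snippet. A minimal sketch of what they might look like, assuming the conventional two-input XOR table (the class and method below are illustrative stand-ins, not the original definitions):

        // Hypothetical stand-ins for the helpers referenced above; not part of the original snippet.
        private static class XOR
        {
            public static readonly double[][] XORInput =
            {
                new[] { 0.0, 0.0 },
                new[] { 0.0, 1.0 },
                new[] { 1.0, 0.0 },
                new[] { 1.0, 1.0 }
            };

            public static readonly double[][] XORIdeal =
            {
                new[] { 0.0 },
                new[] { 1.0 },
                new[] { 1.0 },
                new[] { 0.0 }
            };
        }

        // Supplies the input value for training pair 'chunk', column 'index'.
        private double OnInputFunc(int chunk, int index)
        {
            return XOR.XORInput[chunk][index];
        }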
Example #2
        public void BasicSlidingSineSignal()
        {
            // Build a clean sine wave as the ideal signal and a noisy copy of it as the input.
            var listSize  = 30 * 200;
            var inputList = new List<double>(listSize);
            var idealList = new List<double>(listSize);
            var rand      = new Random(23);

            for (int i = 0; i < listSize; i++)
            {
                idealList.Add(Math.Sin(Math.PI * 2.0 * i / 30));
                inputList.Add(idealList[idealList.Count - 1] + (rand.NextDouble() - 0.5) * 0.1);
            }

            var input = new SlidingWindowMLDataProvider(inputList, 10, 0, 1);
            var ideal = new SlidingWindowMLDataProvider(idealList, 2, 11, 1); // predict the eleventh and twelfth items from the ten that precede them
            var ds    = new DynamicMLDataSet(input, ideal);

            Assert.AreEqual(10, input.WindowSize);
            Assert.AreEqual(10, ds.InputSize);
            Assert.AreEqual(2, ds.IdealSize);
            Assert.AreEqual(listSize, ds.Count);

            // Feed-forward network sized from the data set: 10 inputs, 13 hidden neurons, 2 outputs.
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(ds.InputSize));
            network.AddLayer(new BasicLayer(ds.InputSize + 3));
            network.AddLayer(new BasicLayer(ds.IdealSize));
            network.Structure.FinalizeStructure();
            network.Reset(42);

            var trainer = new Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation(network, ds);

            int maxIteration = 300;
            int iteration    = 0;

            do
            {
                trainer.Iteration();
                Debug.WriteLine(++iteration + ": Error = " + trainer.Error);
            } while (trainer.Error > 0.001 && iteration < maxIteration);

            Assert.IsTrue(iteration < maxIteration);
        }
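
Once training converges, the same data set can be used to spot-check the fitted network. A minimal sketch of a few lines that could be appended to the end of the test, assuming Encog's usual conventions that the data set enumerates IMLDataPair items and that BasicNetwork.Compute maps an input IMLData to an output IMLData; the loop is illustrative and not part of the original test:

            // Compare the network's two-value prediction for the first few windows
            // against the ideal (noise-free) sine values.
            int shown = 0;
            foreach (var pair in ds)
            {
                var prediction = network.Compute(pair.Input);
                Debug.WriteLine("predicted " + prediction[0] + ", ideal " + pair.Ideal[0]);

                if (++shown >= 5)
                {
                    break;
                }
            }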