Inheritance: NeuroObjectCollection
Example #1
 /// <summary>
 ///     <para>
 ///         Adds the contents of another <see cref="xpidea.neuro.net.patterns.PatternsCollection" /> to the end of the
 ///         collection.
 ///     </para>
 /// </summary>
 /// <param name="value">
 ///     A <see cref="xpidea.neuro.net.patterns.PatternsCollection" /> containing the objects to add to the collection.
 /// </param>
 /// <seealso cref="xpidea.neuro.net.patterns.PatternsCollection.Add" />
 public void AddRange(PatternsCollection value)
 {
     for (var i = 0; i < value.Count; i++)
     {
         Add(value[i]);
     }
 }
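A minimal usage sketch (not taken from the library's documentation), assuming the PatternsCollection(count, inputSize, outputSize) constructor shown in the examples below; the collection sizes and variable names are illustrative only:

 // Hypothetical usage: merge two pattern sets into a single collection.
 var setA = new PatternsCollection(4, 2, 1); // 4 patterns, 2 inputs, 1 output each
 var setB = new PatternsCollection(4, 2, 1);
 // ... fill setA and setB with training data ...
 setA.AddRange(setB);                        // setA now holds all 8 patterns
 Console.Out.WriteLine(setA.Count);          // prints 8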
Example #2
 public static void BuildEvenCircle(PatternsCollection aData)
 {
     BuildEvenSquare(aData);
     for (var i = aData.Count - 1; i >= 0; i--)
     {
         var x = aData[i].Input[0];
         var y = aData[i].Input[1];
         if ((x*x + y*y) > 0.8)
             aData.RemoveAt(i);
     }
 }
Example #3
 public PatternsCollection CreateTrainingPatterns(Font font)
 {
     var result = new PatternsCollection(aCharsCount, aMatrixDim*aMatrixDim, aCharsCount);
     for (var i = 0; i < aCharsCount; i++)
     {
         var aBitMatrix = CharToBitArray(Convert.ToChar(aFirstChar + i), font, aMatrixDim, 0);
         for (var j = 0; j < aMatrixDim*aMatrixDim; j++)
             result[i].Input[j] = aBitMatrix[j];
         result[i].Output[i] = 1;
     }
     return result;
 }
Example #4
 public static void BuildEvenSquare(PatternsCollection aData)
 {
     var c = (int) Math.Round(Math.Sqrt(aData.Count));
     var s = 2/(double) c;
     double x = -1;
     double y = -1;
     for (var i = 0; i < c; i++)
     {
         for (var j = 0; j < c; j++)
         {
             aData[i*c + j].Input[0] = x;
             aData[i*c + j].Input[1] = y;
             y = y + s;
         }
         y = -1;
         x = x + s;
     }
 }
Example #5
			public override void Train(PatternsCollection patterns) 
			{
							
				int  iteration = 0;
				if (patterns != null) 
				{
					double error = 0;
					int good = 0;
					while (good < patterns.Count) // Train until all patterns are correct
					{
						if (Form1.IsTerminated) return;
						error = 0;
						owner.progressBar1.Value = good;
						owner.label16.Text = "Training progress: " + ((good * 100)/owner.progressBar1.Maximum).ToString() + "%";
						good = 0;
						for (int i = 0; i<patterns.Count; i++)
						{
							for (int k = 0; k<NodesInLayer(0); k++)	
								nodes[k].Value = patterns[i].Input[k];
							AddNoiseToInputPattern(owner.trackBar3.Value);
							this.Run();
							for (int k = 0;k< this.OutputNodesCount;k++) 
							{
								error += Math.Abs(this.OutputNode(k).Error);
								this.OutputNode(k).Error = patterns[i].Output[k];
							}
							this.Learn();
							if (BestNodeIndex == OutputPatternIndex(patterns[i]))
								good++;
							
							iteration ++;						
							Application.DoEvents();

						}

						foreach (NeuroLink link in links) ((EpochBackPropagationLink)link).Epoch(patterns.Count);

						if ((iteration%2) == 0)
							owner.label17.Text = "AVG Error: " + (error / OutputNodesCount).ToString() + "  Iteration: " + iteration.ToString();
					}
					owner.label17.Text = "AVG Error: " + (error / OutputNodesCount).ToString() + "  Iteration: " + iteration.ToString();
				}

			}
Example #6
        private static void Main(string[] args)
        {
            BidirectionalAssociativeMemorySystem BAMSystem;
            var patterns = new PatternsCollection(4, 4, 4);

            SetPattern(patterns[0], -1, 1, -1, 1, 1, -1, 1, -1); //invert
            SetPattern(patterns[1], -1, -1, 1, 1, 1, 1, -1, -1);
            SetPattern(patterns[2], 1, 1, -1, -1, -1, -1, 1, 1);
            SetPattern(patterns[3], -1, -1, -1, -1, 1, 1, 1, 1);

            BAMSystem = new BidirectionalAssociativeMemorySystem(4, 4);
            BAMSystem.Train(patterns);
            BAMSystem.SaveToFile("test.net");

            //We didn't expose the following patterns to the BAM, but we'd like to see what it will produce based on
            //previous experience...
            //inputs			//expected values, not shown to BAM
            SetPattern(patterns[0], 1, 1, 1, 1, -1, -1, -1, -1);
            SetPattern(patterns[1], 1, 1, 1, -1, -1, -1, -1, 1);
            SetPattern(patterns[2], 1, -1, 1, -1, -1, 1, -1, 1);
            SetPattern(patterns[3], -1, 1, -1, 1, 1, -1, 1, -1);

            Console.Out.WriteLine("Input pattern:           BAM output:         Expected output:  ");
            foreach (var p in patterns)
            {
                BAMSystem.SetValuesFromPattern(p);
                BAMSystem.Run();
                foreach (var d in p.Input)
                    Console.Out.Write(d + ",");
                Console.Out.Write("             ");
                for (var i = 0; i < BAMSystem.OutputNodesCount; i++)
                    Console.Out.Write(BAMSystem.OutputNode(i).Value + ",");
                Console.Out.Write("           ");
                foreach (var d in p.Output)
                    Console.Out.Write(d + ",");
                Console.Out.WriteLine("           ");
            }
            Console.In.ReadLine();
        }
Example #7
 private void SetPattern(PatternsCollection aPatterns, int i, double x, double y)
 {
     aPatterns[i - 1].Input[0] = x;
     aPatterns[i - 1].Input[1] = y;
 }
Example #8
 /// <summary>
 /// </summary>
 /// <param name="mappings"></param>
 public CustomPatternEnumerator(PatternsCollection mappings)
 {
     temp           = mappings;
     baseEnumerator = temp.GetEnumerator();
 }
Example #10
 /// <summary>
 ///     <para>
 ///         Initializes a new instance of <see cref="xpidea.neuro.net.patterns.PatternsCollection" /> based on another
 ///         <see cref="xpidea.neuro.net.patterns.PatternsCollection" />.
 ///     </para>
 /// </summary>
 /// <param name='value'>
 ///     A <see cref="xpidea.neuro.net.patterns.PatternsCollection" /> from which the contents are copied
 /// </param>
 public PatternsCollection(PatternsCollection value)
 {
     AddRange(value);
 }
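A short usage sketch of the copy constructor, again assuming the (count, inputSize, outputSize) constructor used elsewhere on this page; the names are hypothetical:

 // Hypothetical usage: clone an existing pattern set before modifying it.
 var original = new PatternsCollection(4, 2, 1);
 // ... fill original with training data ...
 var copy = new PatternsCollection(original); // copy now holds the same 4 patterns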
Example #11
            public override void Train(PatternsCollection patterns)
            {
                var iteration = 0;
                if (patterns != null)
                {
                    double error = 0;
                    var good = 0;
                    while (good < patterns.Count) // Train until all patterns are correct
                    {
                        if (IsTerminated) return;
                        error = 0;
                        owner.progressBar1.Value = good;
                        owner.label16.Text = "Training progress: " + ((good*100)/owner.progressBar1.Maximum) + "%";
                        good = 0;
                        for (var i = 0; i < patterns.Count; i++)
                        {
                            for (var k = 0; k < NodesInLayer(0); k++)
                                nodes[k].Value = patterns[i].Input[k];
                            AddNoiseToInputPattern(owner.trackBar3.Value);
                            Run();
                            var idx = (int) patterns[i].Output[0];
                            for (var k = 0; k < OutputNodesCount; k++)
                            {
                                error += Math.Abs(OutputNode(k).Error);
                                if (k == idx)
                                    OutputNode(k).Error = 1;
                                else
                                    OutputNode(k).Error = 0;
                            }
                            Learn();
                            if (BestNodeIndex == idx)
                                good++;

                            iteration ++;
                            Application.DoEvents();
                        }

                        foreach (var link in links) ((EpochBackPropagationLink) link).Epoch(patterns.Count);

                        if ((iteration%2) == 0)
                            owner.label17.Text = "AVG Error: " + (error/OutputNodesCount) + "  Iteration: " + iteration;
                    }
                    owner.label17.Text = "AVG Error: " + (error/OutputNodesCount) + "  Iteration: " + iteration;
                }
            }
Example #13
 public override void Train(PatternsCollection patterns)
 {
     this.patterns = patterns;
     if (patterns != null)
         for (var i = 0; i < trainingIterations; i++)
         {
             for (var j = 0; j < patterns.Count; j++)
             {
                 SetValuesFromPattern(patterns[j]);
                 Run();
                 Learn();
             }
             Epoch(0);
             if (PatternStyles.IsTerminated)
                 return;
             if ((i%3) == 0)
                 ShowMeTheMovie();
         }
     ShowMeTheMovie();
 }
Example #14
        /// <summary>
        ///     Overridden. Trains the network (makes the network learn the patterns).
        /// </summary>
        /// <param name="patterns">Training patterns.</param>
        public override void Train(PatternsCollection patterns)
        {
            //This method implementation is for reference only -
            //You may want to implement your own method by overriding this one.

            if (patterns != null)
            {
                var good = 0;
                var tolerance = 0.2;
                while (good < patterns.Count) // Train until all patterns are correct
                {
                    good = 0;
                    for (var i = 0; i < patterns.Count; i++)
                    {
                        for (var k = 0; k < NodesInLayer(0); k++) nodes[k].Value = (patterns[i]).Input[k];
                        for (var j = firstMiddleNode; j < NodesCount; j++) nodes[j].Run();
                        for (var k = firstOutputNode; k < NodesCount; k++)
                            nodes[k].Error = (patterns[i]).Output[k - firstOutputNode];
                        for (var j = NodesCount - 1; j >= firstMiddleNode; j--)
                            nodes[j].Learn();
                        var InRange = true;
                        for (var k = 0; k < OutputNodesCount; k++)
                        {
                            if (Math.Abs(OutputNode(k).Value - (patterns[i]).Output[k]) >= tolerance) InRange = false;
                            //Console.Out.WriteLine(this.OutputNode(k).Value.ToString()+"   " +this.OutputNode(k).Error.ToString());
                        }
                        if (InRange) good++;
                    }
                    Epoch(patterns.Count);
                }
            }
        }
Example #15
            public override void Train(PatternsCollection patterns)
            {
                //This method implementation is for reference only -
                //You may want to implement your own method by overriding this one.				
                var iteration = 0;
                if (patterns != null)
                {
                    double error = 0;
                    var good = 0;
                    while (good < patterns.Count) // Train until all patterns are correct
                    {
                        if (IsTerminated) return;
                        error = 0;
                        owner.progressBar1.Value = good;
                        owner.label16.Text = "Training progress: " + ((good*100)/owner.progressBar1.Maximum) + "%";
                        good = 0;
                        for (var i = 0; i < patterns.Count; i++)
                        {
                            for (var k = 0; k < NodesInLayer(0); k++)
                                nodes[k].Value = patterns[i].Input[k];
                            AddNoiseToInputPattern(owner.trackBar3.Value);
                            Run();
                            for (var k = 0; k < OutputNodesCount; k++)
                            {
                                error += Math.Abs(OutputNode(k).Error);
                                OutputNode(k).Error = patterns[i].Output[k];
                            }
                            Learn();
                            if (BestNodeIndex == OutputPatternIndex(patterns[i]))
                                good++;

                            iteration ++;
                            Application.DoEvents();
                        }
                        if ((iteration%2) == 0)
                            owner.label17.Text = "AVG Error: " + (error/OutputNodesCount) + "  Iteration: " + iteration;
                    }
                    owner.label17.Text = "AVG Error: " + (error/OutputNodesCount) + "  Iteration: " + iteration;
                }
            }
Example #16
 /// <summary>
 ///     Overridden. Performs network training. Here you write the code to train your network.
 /// </summary>
 /// <param name="patterns">Set of the patterns that will be exposed to a network during the training.</param>
 /// <remarks>
 ///     This method implementation is for reference only -
 ///     You may want to implement your own method by overriding this one.
 ///     This implementation will
 ///     complete network training only after the network will produce
 ///     correct output for all input patterns.
 ///     Be advised that in this example network training will never complete if input patterns
 ///     have non-linear character.
 /// </remarks>
 public override void Train(PatternsCollection patterns)
 {
     int Good, i;
     if (patterns != null)
     {
         Good = 0;
         while (Good < patterns.Count)
         {
             Good = 0;
             for (i = 0; i < patterns.Count; i++)
             {
                 SetValuesFromPattern(patterns[i]);
                 AdalineNode.Run();
                 if ((patterns[i]).Output[0] != AdalineNode.Value)
                 {
                     AdalineNode.Learn();
                     break;
                 }
                 Good++;
             }
         }
     }
 }
Example #17
        private static void Main(string[] args)
        {
            Console.Out.WriteLine("                       BACKPROPAGATION neural network demo.");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("												 Copyright(C) XP Idea.com 2001-2004 ");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("The purpose of this demo is to show learning abilities of BACKPROP network.");
            Console.Out.WriteLine("The BACKPROP network is able to learn much more complex data patterns, than");
            Console.Out.WriteLine("Adaline network (please see OCR demo application). ");
            Console.Out.WriteLine("This example simple shows that the Backprop network is able to learn ");
            Console.Out.WriteLine("an 'exclusive OR' (XOR) operation, but the Adaline network is not able to do so.");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("         false XOR false = false");
            Console.Out.WriteLine("         true XOR false = true");
            Console.Out.WriteLine("         false XOR true = true");
            Console.Out.WriteLine("         true XOR true = false");
            Console.Out.WriteLine("");
            Console.Out.WriteLine(" As result of the training, the network will produce output ‘0’");
            Console.Out.WriteLine("corresponding to logical ‘false’ or ‘1’ corresponding to logical ‘true’ value.");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("PLEASE HIT ENTER TO CONTINUE");
            Console.In.ReadLine();

            Console.Out.WriteLine("");
            Console.Out.WriteLine("During this demo you will be prompted to enter input values");
            Console.Out.WriteLine("for the network. Then network will perform “XOR” operation on ");
            Console.Out.WriteLine("the entered values and result will be displayed to you. ");
            Console.Out.WriteLine("Please enter any values in range from 0 to 1 and hit [ENTER] when prompted. ");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("NOW THE NETWORK IS READY TO LEARN FOLLOWING PATTERNS");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("			false XOR false = false;");
            Console.Out.WriteLine("			true XOR false = true;");
            Console.Out.WriteLine("			false XOR true = true;");
            Console.Out.WriteLine("			true XOR true = false;");
            Console.Out.WriteLine("PLEASE HIT ENTER TO BEGIN TRAINING");
            Console.In.ReadLine();
            Console.Out.Write("TRAINING....");

            double d;
            BackPropagationNetwork BackPropNet;
            var patterns = new PatternsCollection(TrainingSets, 2, 1);

            SetPattern(patterns[0], 0, 0, 0);
            SetPattern(patterns[1], 0, 1, 1);
            SetPattern(patterns[2], 1, 0, 1);
            SetPattern(patterns[3], 1, 1, 0);
            //Network(0.55,0.6,
            BackPropNet = new BackPropagationNetwork(0.55, 0.6, new int[3] {2, 3, 1});
            BackPropNet.Train(patterns);
            Console.Out.WriteLine("DONE!");
            Console.Out.WriteLine("");
            //BackPropNet.SaveToFile("test.net");
            while (true)
            {
                try
                {
                    Console.Out.Write("Enter 1st value: ");
                    d = double.Parse(Console.In.ReadLine());
                    BackPropNet.InputNode(0).Value = d;
                    Console.Out.Write("Enter 2nd value: ");
                    d = double.Parse(Console.In.ReadLine());
                    BackPropNet.InputNode(1).Value = d;
                    BackPropNet.Run();
                    Console.Out.WriteLine("Result: " + Math.Round(BackPropNet.OutputNode(0).Value));
                    Console.Out.WriteLine("");
                }
                catch
                {
                    return;
                }
            }
        }
Example #18
 /// <summary>
 ///     Overridden. Trains the network.
 /// </summary>
 /// <param name="patterns"></param>
 public override void Train(PatternsCollection patterns)
 {
     if (patterns != null)
         for (var i = 0; i < trainingIterations; i++)
         {
             for (var j = 0; j < patterns.Count; j++)
             {
                 SetValuesFromPattern(patterns[j]);
                 Run();
                 Learn();
             }
             Epoch(0);
         }
 }
Example #19
 /// <summary>
 ///     Overridden. Trains the network to recognize specific patterns. Employs
 ///     <see cref="xpidea.neuro.net.bam.BidirectionalAssociativeMemoryOutputNode.Run" /> and
 ///     <see cref="xpidea.neuro.net.bam.BidirectionalAssociativeMemoryOutputNode.Learn" />
 ///     to teach the network.
 /// </summary>
 /// <param name="patterns">Training patterns.</param>
 public override void Train(PatternsCollection patterns)
 {
     if (patterns != null)
         for (var i = 0; i < patterns.Count; i++)
         {
             SetValuesFromPattern(patterns[i]);
             Learn();
         }
 }
Example #20
        private static void Main(string[] args)
        {
            Console.Out.WriteLine("                       ADALINE neural network demo.");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("												 Copyright(C) XP Idea.com 2001-2004 ");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("The purpose of this demo is to show classification ability of ADALINE network.");
            Console.Out.WriteLine("The ADALINE network is able to classify LINEARY SEPARATABLE data into two");
            Console.Out.WriteLine("categories.  Such data could be presented by linear function in form of");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("         y=a*x+b;");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("	or any other similar function, including logical “OR” operation:");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("         false OR false = false;");
            Console.Out.WriteLine("			true OR false = true;");
            Console.Out.WriteLine("			false OR true = true;");
            Console.Out.WriteLine("			true OR true = true;");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("This example will show how the Adaline network can perform logical “OR” ");
            Console.Out.WriteLine("operation. As result of this operation, the network will produce output ‘-1’");
            Console.Out.WriteLine("corresponding to logical ‘false’ or ‘1’ corresponding to logical true ‘true’. ");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("PLEASE HIT ENTER TO CONTINUE");
            Console.In.ReadLine();

            Console.Out.WriteLine("");
            Console.Out.WriteLine("During this demo you will be prompted to enter input values");
            Console.Out.WriteLine("for the network. Then network will perform “OR” operation on ");
            Console.Out.WriteLine("the entered values and result will be displayed to you. ");
            Console.Out.WriteLine("Please enter any values in range from 0 to 1 and hit [ENTER] when prompted. ");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("NOW THE NETWORK IS READY TO LEARN FOLLOWING PATTERNS");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("			false OR false = false;");
            Console.Out.WriteLine("			true OR false = true;");
            Console.Out.WriteLine("			false OR true = true;");
            Console.Out.WriteLine("			true OR true = true;");
            Console.Out.WriteLine("PLEASE HIT ENTER TO BEGIN TRAINING");
            Console.In.ReadLine();
            Console.Out.Write("TRAINING....");
            double d;
            AdalineNetwork AdalineNet;
            var patterns = new PatternsCollection(TrainingSets, 2, 1);

            SetPattern(patterns[0], 0, 0, -1);
            SetPattern(patterns[1], 0, 1, 1);
            SetPattern(patterns[2], 1, 0, 1);
            SetPattern(patterns[3], 1, 1, 1);
            AdalineNet = new AdalineNetwork(2, 0.1);
            AdalineNet.Train(patterns);
            Console.Out.WriteLine("DONE!");
            Console.Out.WriteLine("");

            //	Console.Out.WriteLine("Saving the network to the file 'test.net'");
            //	AdalineNet.SaveToFile("test.net");

            Console.Out.WriteLine("");

            //AdalineNet = new AdalineNetwork( "test.net");
            while (true)
            {
                try
                {
                    Console.Out.Write("Enter 1st value: ");
                    d = double.Parse(Console.In.ReadLine());
                    AdalineNet.InputNode(0).Value = d;
                    Console.Out.Write("Enter 2nd value: ");
                    d = double.Parse(Console.In.ReadLine());
                    AdalineNet.InputNode(1).Value = d;
                    AdalineNet.AdalineNode.Run();
                    Console.Out.WriteLine("Result: " + AdalineNet.AdalineNode.Value);
                    Console.Out.WriteLine("");
                }
                catch
                {
                    return;
                }
            }
        }
Example #21
        private void button1_Click(object sender, EventArgs e)
        {
            {
                var data = new PatternsCollection(PatternsCount, InputsCount, OutputsCount);
                var checkedItem = GetCheckedIndex();
                if (checkedItem > 6)
                    switch (checkedItem)
                    {
                        case 7:
                        {
                            PatternStyles.BuildEvenSquare(data);
                            break;
                        }
                        case 8:
                        {
                            PatternStyles.BuildEvenCircle(data);
                            break;
                        }
                    }
                else
                {
                    for (var i = 1; i <= PatternsCount; i++)
                    {
                        double x = 0;
                        double y = 0;
                        switch (checkedItem)
                        {
                            case 0:
                            {
                                PatternStyles.BuildSquare(out x, out y);
                                break;
                            }
                            case 1:
                            {
                                PatternStyles.BuildDimond(out x, out y);
                                break;
                            }
                            case 2:
                            {
                                PatternStyles.BuildPlus(out x, out y);
                                break;
                            }
                            case 3:
                            {
                                PatternStyles.BuildRing(out x, out y);
                                break;
                            }
                            case 4:
                            {
                                PatternStyles.BuildCircle(out x, out y);
                                break;
                            }
                            case 5:
                            {
                                PatternStyles.BuildTwoCircles(out x, out y);
                                break;
                            }
                            case 6:
                            {
                                PatternStyles.ChessBoard(out x, out y);
                                break;
                            }
                            default:
                                throw new ApplicationException("Please select a pattern style!");
                        }
                        SetPattern(data, i, x, y);
                    }
                }


                var LearningRateStart = 0.7;
                var LearningRateEnds = 0.05;
                var InitialNeighborhoodSize = 5;
                var NeighborhoodDecrRate = 5;
                var Iterations = 2000;
                var a = new MySelfOrganizingNetwork(InputsCount, MapSize, MapSize, LearningRateStart, LearningRateEnds,
                    InitialNeighborhoodSize, Iterations/NeighborhoodDecrRate, Iterations);
                a.control = panel1;
                a.Train(data);
                data = null;
                a = null;
            }
        }
Example #22
        /// <summary>
        ///     Overridden. Performs network training. Here you write the code to train your network.
        /// </summary>
        /// <param name="patterns">Set of the patterns that will be exposed to a network during the training.</param>
        public override void Train(PatternsCollection patterns)
        {
            //This method implementation is for reference only -
            //You may want to implement your own method by overriding this one.

            if (patterns != null)
            {
                var good = 0;
                var tolerance = 0.2;
                while (good < patterns.Count) // Train until all patterns are correct
                {
                    good = 0;
                    for (var i = 0; i < patterns.Count; i++)
                    {
                        for (var k = 0; k < NodesInLayer(0); k++)
                            nodes[k].Value = patterns[i].Input[k];
                        Run();
                        for (var k = 0; k < OutputNodesCount; k++)
                            OutputNode(k).Error = patterns[i].Output[k];
                        Learn();
                        var InRange = true;
                        for (var k = 0; k < OutputNodesCount; k++)
                        {
                            if (Math.Abs(OutputNode(k).Value - patterns[i].Output[k]) >= tolerance) InRange = false;
                            //	Console.Out.WriteLine(this.OutputNode(k).Value.ToString()+"   " +this.OutputNode(k).Error.ToString());
                            // InRange = Math.Round(nodes[k].Value) == Math.Round((patterns[i]).Output[k - firstOutputNode]);
                        }
                        if (InRange)
                            good++;
                    }
                }
            }
        }
Example #24
		private void button1_Click(object sender, System.EventArgs e)
		{
			trainingPatterns = CreateTrainingPatterns(label5.Font);
			tabControl1.SelectedTab = tabPage2;
		}
Example #25
 private void button1_Click(object sender, EventArgs e)
 {
     aFirstChar = textBox1.Text[0];
     aLastChar = textBox2.Text[0];
     aCharsCount = aLastChar - aFirstChar + 1;
     label5.Text = "";
     var chCnt = aCharsCount;
     if (aCharsCount > 50)
         chCnt = 50;
     for (var i = 0; i < chCnt; i++)
         label5.Text += Convert.ToChar(aFirstChar + i) + " ";
     trainingPatterns = CreateTrainingPatterns(label5.Font);
     tabControl1.SelectedTab = tabPage2;
 }
Example #26
 /// <summary>
 ///     Performs network training. Here you write the code to train your network.
 /// </summary>
 /// <param name="patterns">Set of the patterns that will be exposed to a network during the training.</param>
 /// <remarks>
 ///     <p>
 ///         There are several major paradigms, or approaches, to neural network learning. These include
 ///         <i>supervised, unsupervised</i>, and <i>reinforcement</i> learning. How the training data is processed is a
 ///         major aspect of these learning paradigms.
 ///     </p>
 ///     <p>
 ///         <i>Supervised</i> learning is the most common form of learning and is sometimes called programming by example.
 ///         The neural network is trained by showing it examples of the problem state or attributes along with the desired
 ///         output or action. The neural network makes a prediction based on the inputs and if the output differs from
 ///         the desired output, then the network is adjusted or adapted to produce the correct output. This process is
 ///         repeated over and over until the agent learns to make accurate classifications or predictions. Historical data
 ///         from databases, sensor logs, or trace logs is often used as the training or example data.
 ///     </p>
 ///     <p>
 ///         <i>Unsupervised</i> learning is used when the neural network needs to recognize similarities between inputs or
 ///         to identify features in the input data. The data is presented to the network, and it adapts so that it
 ///         partitions the data into groups. The clustering or segmenting process continues until the neural network places
 ///         the same data into the same group on successive passes over the data. An unsupervised learning algorithm performs
 ///         a type of feature detection where important common attributes in the data are extracted. The Kohonen map is a
 ///         good example of a network that uses unsupervised learning.
 ///     </p>
 ///     <p>
 ///         <i>Reinforcement</i> learning is a type of supervised learning used when explicit input/output pairs of
 ///         training data are not available. It can be used in cases where there is a sequence of inputs and the desired
 
 ///         output is only known after the specific sequence occurs. This process of identifying the relationship between a
 ///         series of input values and a later output value is called temporal credit assignment. Because we provide less
 ///         specific error information, reinforcement learning usually takes longer than supervised learning and is less
 ///         efficient. However, in many situations, having exact prior information about the desired outcome is not
 ///         possible. In many ways,
 ///         reinforcement learning is the most realistic form of learning.
 ///     </p>
 /// </remarks>
 public virtual void Train(PatternsCollection patterns)
 {
 }
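The base implementation is empty, so a derived network supplies the training loop itself. Below is a minimal sketch of a supervised override, modelled on the SetValuesFromPattern/Run/Learn/Epoch sequence used in the other examples on this page (see Example #18); the fixed pass count is an assumption for illustration, not part of the library:

 public override void Train(PatternsCollection patterns)
 {
     const int passes = 100; // illustrative value, not taken from the library
     if (patterns == null) return;
     for (var pass = 0; pass < passes; pass++)
     {
         for (var j = 0; j < patterns.Count; j++)
         {
             SetValuesFromPattern(patterns[j]); // load inputs and expected outputs
             Run();                             // forward pass
             Learn();                           // adjust weights toward the pattern
         }
         Epoch(0); // commit the accumulated weight changes, as in Example #18
     }
 }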
Example #27
 public void DrawPatterns(PatternsCollection pat, Graphics g)
 {
     foreach (var ptn in pat)
     {
         var pen = new Pen(Color.Black, 4);
         var pt = new Point(p(ptn.Input[0]), p(ptn.Input[1]));
         var pt2 = pt;
         pt2.Offset(2, 2);
         g.DrawLine(pen, pt, pt2);
         pen.Dispose();
     }
 }