Example #1
        /// <summary>
        /// Applies the accumulated delta sums, modifying the layer's coefficients and biases as a result of back propagation.
        /// </summary>
        /// <param name="showDetails">If <i>true</i>, prints the delta sums using PrintClass; if <i>false</i>, prints nothing.</param>
        /// <param name="batchSize">The number of examples in the current training batch, used for the L2 regularization term.</param>
        public void ApplyDeltaSums(bool showDetails, int batchSize)
        {
            // GOING THROUGH ALL NEURONS...
            for (int neuronInd = 0; neuronInd < neuronDeltas.Length; neuronInd++)
            {
                // CHANGING BIASES
                if (showDetails)
                {
                    PrintClass.Print("\n   - Neuron  " + neuronInd.ToString() + "  bias delta sum: " + biasDeltaSums[neuronInd].ToString() + "\n     coeffs delta sums: ");
                }

                biases[neuronInd]       -= biasDeltaSums[neuronInd];
                biasDeltaSums[neuronInd] = 0;
                neuronDeltas[neuronInd]  = 0;

                // GOING THROUGH ALL COEFFS OF THE NEURON...
                for (int coeffInd = 0; coeffInd < coeffs[neuronInd].Length; coeffInd++)
                {
                    // CHANGING COEFFS APPLYING L2 REGULARIZATION
                    if (showDetails)
                    {
                        PrintClass.Print(coeffDeltaSums[neuronInd][coeffInd].ToString() + " ");
                    }
                    coeffs[neuronInd][coeffInd]         = (1 - ANNetwork.SayNetworkLyambda() * ANNetwork.SayNetworkLearningSpeed() / batchSize) * coeffs[neuronInd][coeffInd] - coeffDeltaSums[neuronInd][coeffInd];
                    coeffDeltaSums[neuronInd][coeffInd] = 0;
                }
            }
            if (showDetails)
            {
                Console.Write("\n");
            }
        }
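
For reference, a hedged restatement of the update that the loops above perform (symbols assumed from the code: η is the value returned by ANNetwork.SayNetworkLearningSpeed(), λ the value returned by ANNetwork.SayNetworkLyambda(), m is batchSize, and Δw, Δb are the accumulated delta sums):

\[
w_{ij} \leftarrow \Bigl(1 - \frac{\eta \lambda}{m}\Bigr) w_{ij} - \Delta w_{ij},
\qquad
b_i \leftarrow b_i - \Delta b_i
\]

That is, a plain gradient step on the accumulated deltas, with an L2 weight-decay factor applied to the coefficients only; the biases are left unregularized.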
Example #2
        // LISTEX COMMAND
        public void ListExs()
        {
            string result = examples.ShowFileList();

            if (result.Length > 0)
            {
                PrintClass.PrintLine(result);
            }
        }
Example #3
        // CLEAR EXAMPLES
        public string ClearExamples()
        {
            Examples tmpExamples = Examples.Init();

            if (tmpExamples.SayExamplesNum() == 0)
            {
                return("\n-> Error clearing examples - no examples exist.");
            }

            tmpExamples.ClearExamples();
            PrintClass.PrintLine("\n-> Training and testing examples lists cleared - no examples exist any more.");

            return("");
        }
Example #4
        /// <summary>
        /// Prints layer parameters (coefficients and biases) using PrintClass.
        /// </summary>
        /// <param name="showDetails">If <i>true</i>, also prints every neuron's bias and coefficients; if <i>false</i>, prints only the summary lines.</param>
        public void ShowParams(bool showDetails)
        {
            PrintClass.PrintLine("  Neurons number: " + neuronDeltas.Length);
            PrintClass.PrintLine("  Inputs number: " + coeffs[0].Length);
            PrintClass.PrintLine("  Activation function type: " + AFType.ToString().Remove(AFType.ToString().IndexOf("AFType")));

            if (showDetails)
            {
                string tmpString;
                for (int i = 0; i < neuronDeltas.Length; i++)
                {
                    PrintClass.PrintLine("\n    - Neuron " + i.ToString() + ": bias " + biases[i] + ", coefficients :");
                    tmpString = "      ";
                    for (int j = 0; j < coeffs[i].Length; j++)
                    {
                        tmpString += coeffs[i][j].ToString() + "  ";
                    }
                    PrintClass.PrintLine(tmpString);
                }
            }
        }
Example #5
        // COST EXAMPLES
        public string Cost()
        {
            if (network == null)
            {
                return("\n-> Error calculating cost - no network exists.");
            }

            if (!network.examples.ExamplesExist())
            {
                return("\n-> Error calculating cost - no examples exists.");
            }

            string result = network.CalcAllCost(out float cost, true);

            if (result.Length > 0)
            {
                return(result);
            }

            PrintClass.PrintLine("\n-> Total cost for existing " + network.examples.SayExamplesNum().ToString() + " examples is: " + cost.ToString() + ".");

            return("");
        }
Example #6
        // HELP MESSAGE
        public void Help()
        {
            PrintClass.PrintLine(helpMessage);
        }
Example #7
        // RUN EXAMPLE
        public string RunExample(string tmpString)
        {
            tmpString = tmpString.Trim();

            // CHECKING EXAMPLES, NETWORK AND INPUT
            if (network == null)
            {
                return("\n-> Error running an example - the network doesn't exist.");
            }

            if (network.examples.SayExamplesNum() == 0)
            {
                return("\n-> Error running an example - no existing examples.");
            }

            if (tmpString.Length == 0)
            {
                return("\n-> Error running an example - not enough input data.");
            }

            // PARSING EXAMPLE INDEX
            int    exampleInd = -1;
            bool   isLast     = true;
            string result     = Parsing.ParseInt(ref tmpString, ref exampleInd, ref isLast, "examplesInd", Parsing.AfterParsingCheck.NonNegative);

            if (result.Length > 0)
            {
                return(result);
            }

            bool allLayersOutputs = false;

            // PARSING OPTION OF PRINTING ALL LAYERS OUTPUTS
            if (!isLast)
            {
                int allLOutputs = -1;
                result = Parsing.ParseInt(ref tmpString, ref allLOutputs, ref isLast, "printAllLayersOutputs", Parsing.AfterParsingCheck.NonNegative);
                if (result.Length > 0)
                {
                    return(result);
                }

                allLayersOutputs = allLOutputs != 0;
            }

            float[][] outputs = null;
            result = network.examples.GiveExampleLink(exampleInd, out Example theExample);
            if (result.Length > 0)
            {
                return(result);
            }

            result = network.React(theExample.SayX(), ref outputs, out int networkAnswer);
            if (result.Length > 0)
            {
                return(result);
            }

            PrintClass.Print("\n\n-> On example N " + exampleInd.ToString() + "\n(" + theExample.SayX(0));
            for (int inputInd = 1; inputInd < theExample.SayXLength(); inputInd++)
            {
                PrintClass.Print(", " + theExample.SayX(inputInd));
            }

            PrintClass.Print(")\n network replied: " + networkAnswer.ToString() + " (" + outputs[outputs.Length - 1][0].ToString());
            for (int outputInd = 1; outputInd < outputs[outputs.Length - 1].Length; outputInd++)
            {
                PrintClass.Print(", " + outputs[outputs.Length - 1][outputInd].ToString());
            }

            PrintClass.Print(").\n while expected is: " + theExample.SayExpected().ToString() + " (" + theExample.SayY(0).ToString());
            for (int outputInd = 1; outputInd < theExample.SayYLength(); outputInd++)
            {
                PrintClass.Print(", " + theExample.SayY(outputInd).ToString());
            }

            PrintClass.Print(").\n");

            if (allLayersOutputs)
            {
                for (int layerInd = 0; layerInd < outputs.Length; layerInd++)
                {
                    PrintClass.Print("\n  - Layer " + layerInd.ToString() + " outputs:\n" + outputs[layerInd][0].ToString());
                    for (int outputInd = 1; outputInd < outputs[layerInd].Length; outputInd++)
                    {
                        PrintClass.Print(", " + outputs[layerInd][outputInd].ToString());
                    }
                }
            }

            return("");
        }
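
A minimal usage sketch, assuming RunExample is called from a command dispatcher with the raw argument string; the object name cmds below is illustrative, not from the source. The argument format inferred from the parsing above is "<exampleInd> [printAllLayersOutputs]":

        // Hypothetical call: run example 3 and also print every layer's outputs (second argument nonzero).
        string error = cmds.RunExample("3 1");
        if (error.Length > 0)
        {
            PrintClass.PrintLine(error);
        }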