/// <summary>
/// Applies the accumulated delta sums to the layer's biases and coefficients after a
/// back-propagation batch, then resets all accumulators for the next batch.
/// </summary>
/// <param name="showDetails">Set to <i>true</i> - prints delta sums using PrintClass, set to <i>false</i> - no printing.</param>
/// <param name="batchSize">The number of examples of current teaching for L2 regularization realization.</param>
public void ApplyDeltaSums(bool showDetails, int batchSize)
{
    // L2 weight-decay factor is identical for every coefficient, so compute it once.
    float decayFactor = 1 - ANNetwork.SayNetworkLyambda() * ANNetwork.SayNetworkLearningSpeed() / batchSize;
    // GOING THROUGH ALL NEURONS...
    for (int neuron = 0; neuron < neuronDeltas.Length; neuron++)
    {
        if (showDetails)
        {
            PrintClass.Print("\n - Neuron " + neuron.ToString() + " bias delta sum: " + biasDeltaSums[neuron].ToString() + "\n coeffs delta sums: ");
        }
        // APPLYING AND RESETTING THE BIAS ACCUMULATOR; NEURON DELTA IS CLEARED AS WELL
        biases[neuron] -= biasDeltaSums[neuron];
        biasDeltaSums[neuron] = 0;
        neuronDeltas[neuron] = 0;
        // GOING THROUGH ALL COEFFS OF THE NEURON...
        for (int coeff = 0; coeff < coeffs[neuron].Length; coeff++)
        {
            if (showDetails)
            {
                PrintClass.Print(coeffDeltaSums[neuron][coeff].ToString() + " ");
            }
            // L2 REGULARIZATION SHRINKS THE WEIGHT, THEN THE ACCUMULATED DELTA IS SUBTRACTED AND CLEARED
            coeffs[neuron][coeff] = decayFactor * coeffs[neuron][coeff] - coeffDeltaSums[neuron][coeff];
            coeffDeltaSums[neuron][coeff] = 0;
        }
    }
    if (showDetails)
    {
        Console.Write("\n");
    }
}
// LISTNET COMMAND
// Shows the list of stored network files. ShowFileList is an instance method,
// so a minimal stub network is created first if none exists yet.
public void ListNets()
{
    if (network == null)
    {
        // NO NETWORK CREATED - CREATING NEW (single layer, single input stub)
        network = ANNetwork.NewNetwork(new int[] { 1 }, 1);
    }
    network.ShowFileList();
}
// LOAD NETWORK COMMAND
// Loads a network from the named file; a stub network is created first when none
// exists, and the external ref link is updated to point at it.
public string LoadNetwork(string tmpString, ref ANNetwork externalNetworkLink)
{
    if (network == null)
    {
        // NO NETWORK CREATED - CREATING NEW AND PUBLISHING IT THROUGH THE REF LINK
        network = ANNetwork.NewNetwork(new int[] { 1 }, 1);
        externalNetworkLink = network;
    }
    // LoadFromFile already yields "" on success, so its result can be returned directly.
    return (network.LoadFromFile(tmpString));
}
/// <summary>
/// Calculates outputs of the layer regarding inputs: a weighted sum plus bias per neuron,
/// passed through the activation function, with an extra normalization pass for SoftMax.
/// </summary>
/// <param name="inputVector">Vector of inputs; its length must equal the layer's coefficient count.</param>
/// <param name="resultVector">Vector of resulting outputs of the layer's neurons; its length must equal the neuron count.</param>
/// <returns>Empty string on success or error message.</returns>
public string React(float[] inputVector, ref float[] resultVector)
{
    // CHECKING INPUT VECTOR LENGTH TO MATCH THE INPUT NUMBER
    if (inputVector.Length != coeffs[0].Length)
    {
        return ("\n-> Error calculating layer reaction on input vector with length " + inputVector.Length.ToString() + " instead of " + coeffs[0].Length.ToString() + " .");
    }
    // CHECKING RESULT VECTOR LENGTH TO MATCH THE NEURON NUMBER
    if (resultVector.Length != neuronDeltas.Length)
    {
        // FIX: error message previously read "wiht" instead of "with".
        return ("\n-> Error calculating layer reaction with neurons number " + neuronDeltas.Length.ToString() + " to vector with length " + resultVector.Length.ToString() + " .");
    }
    // CALCULATING REACTIONS OF EACH NEURON
    float softMaxSum = 0;
    for (int neuronInd = 0; neuronInd < neuronDeltas.Length; neuronInd++)
    {
        // CALCULATING SUMM OF WEIGHTENED INPUTS FOR CURRENT NEURON
        float sum = biases[neuronInd];
        for (int inputInd = 0; inputInd < coeffs[neuronInd].Length; inputInd++)
        {
            sum += inputVector[inputInd] * coeffs[neuronInd][inputInd];
        }
        resultVector[neuronInd] = AFType.CalcActivation(sum, ANNetwork.SayNetworkAlpha());
        // Sum of activations is accumulated in case SoftMax normalization is needed below.
        softMaxSum += resultVector[neuronInd];
    }
    // FOR SOFTMAX FUNCTION ADDITIONAL OPERATION: normalize outputs so they sum to 1.
    if (AFType.ToString() == AFTypeSoftMax.name)
    {
        for (int neuronInd = 0; neuronInd < resultVector.Length; neuronInd++)
        {
            resultVector[neuronInd] /= softMaxSum;
        }
    }
    return ("");
}
// SET NETWORK LINK ON EXTERNAL NETWORK
// Points this object's network field at an externally owned network instance.
public void SetExternalLink(ref ANNetwork externalLink)
{
    network = externalLink;
}
////////// *******************************************************************************************************************************
////////// *******************************************************************************************************************************
////////// NETWORK FUNCTIONS

// CREATE NEW NETWORK
/// <summary>
/// Parses the newANN command arguments and creates a new network. Every optional argument
/// may be omitted together with everything after it; the network is then created with the
/// arguments parsed so far. Actual argument order (matches the parse stages below):
/// newANN <inputsNumber> <layersNumber> <L1NeuronNumber>...
/// (<trainingSpeed> <alpha> <lyambda> <CostFunctionIndex> {<LxActivationFunctionIndex>...}
///  <scaleBoolean> {<LxLowerBoundary>...} {<LxUpperBoundary>...})
/// </summary>
/// <param name="tmpString">Command tail with the arguments to parse.</param>
/// <param name="externalNetworkLink">Receives the newly created network on success.</param>
/// <returns>Empty string on success or error message.</returns>
public string CreateNewANN(string tmpString, ref ANNetwork externalNetworkLink)
{
    bool isLast = true;
    string result = "";
    // PARSING INPUTS NUMBER
    if (tmpString.Length == 0)
    {
        return ("\n-> Error creating new network: no <inputsNumber> argument entered.");
    }
    int inputsNumber = -1;
    result = Parsing.ParseInt(ref tmpString, ref inputsNumber, ref isLast, "inputsNumber", Parsing.AfterParsingCheck.Positive);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    if (isLast)
    {
        return ("\n-> Error creating new network: no <layersNumber> argument entered.");
    }
    // PARSING LAYER NUMBER
    // FIX: the parse context name passed here was "inputsNumber", producing a misleading error message.
    int layersNumber = -1;
    result = Parsing.ParseInt(ref tmpString, ref layersNumber, ref isLast, "layersNumber", Parsing.AfterParsingCheck.Positive);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    // PARSING LAYER NEURONS NUMBER FOR EACH LAYER
    int[] neuronNumbers = new int[layersNumber];
    for (int layerInd = 0; layerInd < layersNumber; layerInd++)
    {
        result = Parsing.ParseInt(ref tmpString, ref neuronNumbers[layerInd], ref isLast, "<LxNeuronNumber>", Parsing.AfterParsingCheck.Positive);
        if (result.Length > 0)
        {
            return ("\n-> Error creating new network. " + result);
        }
        if ((isLast) && (layerInd < layersNumber - 1))
        {
            return ("\n-> Error creating new network: no <LxNeuronNumber> for layer " + layerInd.ToString() + " .");
        }
    }
    if (isLast)
    {
        network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber);
        externalNetworkLink = network;
        return ("");
    }
    // PARSING TRAINING SPEED IF ABLE
    float learningSpeed = -1;
    result = Parsing.ParseFloat(ref tmpString, ref learningSpeed, ref isLast, "trainingSpeed", Parsing.AfterParsingCheck.Positive);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    if (isLast)
    {
        network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber, learningSpeed);
        externalNetworkLink = network;
        return ("");
    }
    // PARSING ALPHA IF ABLE
    float alpha = -1;
    result = Parsing.ParseFloat(ref tmpString, ref alpha, ref isLast, "alpha", Parsing.AfterParsingCheck.Positive);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    if (isLast)
    {
        network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber, learningSpeed, alpha);
        externalNetworkLink = network;
        return ("");
    }
    // PARSING LYAMBDA IF ABLE (L2 regularization strength; zero allowed)
    float lyambda = -1;
    result = Parsing.ParseFloat(ref tmpString, ref lyambda, ref isLast, "lyambda", Parsing.AfterParsingCheck.NonNegative);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    if (isLast)
    {
        network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber, learningSpeed, alpha, lyambda);
        externalNetworkLink = network;
        return ("");
    }
    // PARSING COST FUNCTION
    CFTypeBase CFType = null;
    result = Parsing.ParseCFType(ref tmpString, ref CFType, ref isLast);
    if (result.Length > 0)
    {
        return (result);
    }
    if (isLast)
    {
        network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber, learningSpeed, alpha, lyambda, CFType);
        externalNetworkLink = network;
        return ("");
    }
    // PARSING ACTIVATION FUNCTIONS FOR ALL LAYERS
    AFTypeBase[] AFTypes = new AFTypeBase[layersNumber];
    result = Parsing.ParseAFTypesArray(ref tmpString, ref AFTypes, ref isLast);
    if (result.Length > 0)
    {
        return (result);
    }
    if (isLast)
    {
        network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber, learningSpeed, alpha, lyambda, CFType, AFTypes);
        externalNetworkLink = network;
        return ("");
    }
    // PARSING INIT SCALE (must be exactly 0 or 1)
    bool initScale;
    int initScaleInt = -1;
    result = Parsing.ParseInt(ref tmpString, ref initScaleInt, ref isLast, "scaleBoolean", Parsing.AfterParsingCheck.NonNegative);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    if ((initScaleInt != 0) && (initScaleInt != 1))
    {
        return ("\n-> Error creating new network: non-boolean initScale " + initScaleInt.ToString() + " .");
    }
    initScale = initScaleInt != 0;
    if (isLast)
    {
        network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber, learningSpeed, alpha, lyambda, CFType, AFTypes, initScale);
        externalNetworkLink = network;
        return ("");
    }
    // PARSING LOWER BOUNDARY AND UPPER BOUNDARY (one value per layer each; both must be present together)
    float[] lowerBoundaries = new float[layersNumber];
    result = Parsing.ParseFloatArray(ref tmpString, ref lowerBoundaries, ref isLast, "lower boundaries", Parsing.AfterParsingCheck.NoCheck);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    if (isLast)
    {
        // FIX: error message previously read "boundaties" instead of "boundaries".
        return ("\n-> Error creating new network: no text for upper boundaries ");
    }
    float[] upperBoundaries = new float[layersNumber];
    result = Parsing.ParseFloatArray(ref tmpString, ref upperBoundaries, ref isLast, "upper boundaries", Parsing.AfterParsingCheck.NoCheck);
    if (result.Length > 0)
    {
        return ("\n-> Error creating new network. " + result);
    }
    network = ANNetwork.NewNetwork(neuronNumbers, inputsNumber, learningSpeed, alpha, lyambda, CFType, AFTypes, initScale, lowerBoundaries, upperBoundaries);
    externalNetworkLink = network;
    return ("");
}
// CONSTRUCTOR
// Links the command processor to the program's network reference and initializes
// the examples storage.
public Commands(ref ANNetwork programNetwork)
{
    examples = Examples.Init();
    network = programNetwork;
}
/// <summary>
/// Program entry point: runs the interactive command loop, dispatching each entered
/// command (matched by prefix, in declaration order) to the matching Commands method
/// until "exit" is entered or the input stream ends.
/// </summary>
static void Main(string[] args)
{
    ANNetwork currANN = null;
    string command;
    Commands allCommands = new Commands(ref currANN);
    allCommands.Welcome();
    // WORKING CYCLE
    while (true)
    {
        Console.Write("\n<- ");
        command = Console.ReadLine();
        // FIX: ReadLine returns null at end of input (redirected stdin, Ctrl+Z/Ctrl+D);
        // previously this crashed on command.Trim() - now it ends the session cleanly.
        if (command == null)
        {
            break;
        }
        command = command.Trim();
        // command is already trimmed, so lowerCommand never needs further trimming below.
        string lowerCommand = command.ToLower();
        if (lowerCommand.Length > 0)
        {
            // HELP COMMAND ENTERED?
            if (lowerCommand.IndexOf("help") == 0)
            {
                allCommands.Help();
            }
            // NEW NETWORK COMMAND ENTERED?
            else if (lowerCommand.IndexOf("newnet") == 0)
            {
                string result = allCommands.CreateNewANN(command.Remove(0, 6).Trim(), ref currANN);
                if (result.Length > 0) { Console.WriteLine(result); }
                else { Console.WriteLine("\n-> Network created successfully."); }
            }
            // SAVENET COMMAND ENTERED?
            else if (lowerCommand.IndexOf("savenet") == 0)
            {
                string result = allCommands.SaveNetwork(command.Remove(0, 7).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
                else { Console.WriteLine("\n-> Network saved to the file successfully."); }
            }
            // LOADNET COMMAND ENTERED?
            else if (lowerCommand.IndexOf("loadnet") == 0)
            {
                string result = allCommands.LoadNetwork(command.Remove(0, 7).Trim(), ref currANN);
                if (result.Length > 0) { Console.WriteLine(result); }
                else { Console.WriteLine("\n-> Network loaded from the file successfully."); }
            }
            // SHOW COMMAND ENTERED?
            else if (lowerCommand.IndexOf("shownet") == 0)
            {
                string result = allCommands.ShowNet(command.Remove(0, 7).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // LISTNET COMMAND ENTERED?
            else if (lowerCommand.IndexOf("listnet") == 0)
            {
                allCommands.ListNets();
            }
            // LISTEX COMMAND ENTERED?
            else if (lowerCommand.IndexOf("listex") == 0)
            {
                allCommands.ListExs();
            }
            // CHANGE INPUTS NUMBER COMMAND ENTERED?
            else if (lowerCommand.IndexOf("setinpnum") == 0)
            {
                string result = allCommands.SetInputsNumber(command.Remove(0, 9).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // CHANGE COST FUNCTION COMMAND ENTERED?
            else if (lowerCommand.IndexOf("setcf") == 0)
            {
                string result = allCommands.SetCFType(command.Remove(0, 5).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // CHANGE ACTIVATION FUNCTION COMMAND ENTERED?
            else if (lowerCommand.IndexOf("setaf") == 0)
            {
                string result = allCommands.SetAFTypes(command.Remove(0, 5).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // CHANGE TRAINING SPEED COMMAND ENTERED?
            else if (lowerCommand.IndexOf("setls") == 0)
            {
                string result = allCommands.SetLearningSpeed(command.Remove(0, 5).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // CHANGE LYAMBDA COMMAND ENTERED?
            else if (lowerCommand.IndexOf("setlyambda") == 0)
            {
                string result = allCommands.SetLyambda(command.Remove(0, 10).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // RESET LESSONS NUMBER COMMAND ENTERED?
            else if (lowerCommand.IndexOf("resetless") == 0)
            {
                string result = allCommands.ResetLessonsNumber(command.Remove(0, 9).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // NEW EXAMPLE COMMAND ENTERED?
            else if (lowerCommand.IndexOf("newex") == 0)
            {
                string result = allCommands.NewExample(command.Remove(0, 5).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // SAVE EXAMPLES COMMAND ENTERED?
            else if (lowerCommand.IndexOf("saveex") == 0)
            {
                string result = allCommands.SaveExamples(command.Remove(0, 6).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
                else { Console.WriteLine("\n-> Examples saved to the file successfully."); }
            }
            // LOADEX COMMAND ENTERED?
            else if (lowerCommand.IndexOf("loadex") == 0)
            {
                string result = allCommands.LoadExamples(command.Remove(0, 6).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
                else { Console.WriteLine("\n-> Examples loaded from the file successfully."); }
            }
            // LOADCSV COMMAND ENTERED?
            else if (lowerCommand.IndexOf("loadcsv") == 0)
            {
                string result = allCommands.LoadExamplesFromCSV(command.Remove(0, 7).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
                else { Console.WriteLine("\n-> Examples loaded from the file successfully."); }
            }
            // SHOW EXAMPLES COMMAND ENTERED?
            else if (lowerCommand.IndexOf("showex") == 0)
            {
                string result = allCommands.ShowExamples(command.Remove(0, 6).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // SHOW EXAMPLE BY INDEX COMMAND?
            else if (lowerCommand.IndexOf("show1ex") == 0)
            {
                string result = allCommands.ShowExampleWithIndex(command.Remove(0, 7).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // DIVIDE COMMAND ENTERED?
            else if (lowerCommand.IndexOf("divex") == 0)
            {
                string result = allCommands.DivideExamples(command.Remove(0, 5).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
                else { Console.WriteLine("\n-> Examples divided successfully."); }
            }
            // TRAIN EXAMPLES COMMAND ENTERED?
            else if (lowerCommand.IndexOf("trainex") == 0)
            {
                string result = allCommands.TrainExamples(command.Remove(0, 7).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // RUN EXAMPLE COMMAND ENTERED?
            else if (lowerCommand.IndexOf("runex") == 0)
            {
                string result = allCommands.RunExample(command.Remove(0, 5).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // CLEAR EXAMPLES COMMAND ENTERED?
            else if (lowerCommand.IndexOf("clearex") == 0)
            {
                string result = allCommands.ClearExamples();
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // COST COMMAND ENTERED?
            else if (lowerCommand.IndexOf("cost") == 0)
            {
                string result = allCommands.Cost();
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // TRAINING EXAMPLES ERROR CALCULATION COMMAND ENTERED?
            else if (lowerCommand.IndexOf("exerr") == 0)
            {
                string result = allCommands.CalcTrainingError(command.Remove(0, 5).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // MINIMAL EXAMPLES ERROR CALCULATION COMMAND ENTERED?
            else if (lowerCommand.IndexOf("minerr") == 0)
            {
                string result = allCommands.CalcMinError(command.Remove(0, 6).Trim());
                if (result.Length > 0) { Console.WriteLine(result); }
            }
            // EXIT COMMAND ENTERED?
            else if (lowerCommand.IndexOf("exit") == 0)
            {
                break;
            }
            // UNKNOWN COMMAND!
            else
            {
                Console.WriteLine("\n-> Error - unknown command: " + command);
            }
        }
    }
    allCommands.Goodbye();
    Console.ReadLine();
}
/// <summary>
/// Performs one gradient-descent back-propagation pass through the layer: computes each
/// neuron's delta, accumulates the bias and coefficient delta sums, and produces the
/// derivatives to hand to the previous (in network architecture) layer.
/// </summary>
/// <param name="layerInputs">Vector of inputs (outputs of previous layers or inputs of the example).</param>
/// <param name="layerOutputs">Vector of outputs of the layer, already calculated previously.</param>
/// <param name="inDerivatives">Derivatives arriving from the next layer (or from the cost function).</param>
/// <param name="outDerivatives">Receives the outgoing derivatives for the previous layer.</param>
/// <param name="lSpeed">Learning speed for deltas calculations.</param>
/// <param name="examplesNum">Number of examples in a batch for delta sums scaling.</param>
/// <returns>Empty string on success or error message.</returns>
public string BPropogate(float[] layerInputs, float[] layerOutputs, float[] inDerivatives, ref float[] outDerivatives, float lSpeed, int examplesNum)
{
    // VALIDATING ALL VECTOR LENGTHS AGAINST THE LAYER GEOMETRY
    int inputCount = coeffs[0].Length;
    int neuronCount = neuronDeltas.Length;
    if (layerInputs.Length != inputCount)
    {
        return ("Inputs vector length (" + layerInputs.Length.ToString() + ") don't match layer neurons' coefficients number (" + inputCount.ToString() + ").");
    }
    if (layerOutputs.Length != neuronCount)
    {
        return ("Outputs vector length (" + layerOutputs.Length.ToString() + ") don't match layers neuron number (" + neuronCount.ToString() + ").");
    }
    if (inDerivatives.Length != neuronCount)
    {
        return ("Derivatives vector length (" + inDerivatives.Length.ToString() + ") don't match layers neuron number (" + neuronCount.ToString() + ").");
    }
    // DERIVATIVE SUMS FOR THE PREVIOUS LAYER - ONE PER INPUT; C# zero-initializes new arrays,
    // so no explicit clearing loop is needed.
    float[] backDerivatives = new float[inputCount];
    // CALCULATING DELTAS OF CURRENT LAYER NEURONS AND DERIVATIVES FOR NEXT LAYER
    for (int neuron = 0; neuron < neuronCount; neuron++)
    {
        // NEURON DELTA = ACTIVATION DERIVATIVE (FROM THIS LAYER'S OUTPUT AND ALPHA) * INCOMING DERIVATIVE
        try
        {
            neuronDeltas[neuron] = AFType.CalcDerivative(layerOutputs[neuron], ANNetwork.SayNetworkAlpha()) * inDerivatives[neuron];
        }
        catch (Exception ex)
        {
            return ("Error calculating activation function for neuron " + neuron.ToString() + " . " + ex.Message);
        }
        for (int input = 0; input < coeffDeltaSums[neuron].Length; input++)
        {
            // COEFFICIENT DELTA SUM GROWS BY DELTA * INPUT * SPEED / BATCH SIZE (mean step over the batch)
            coeffDeltaSums[neuron][input] += neuronDeltas[neuron] * layerInputs[input] * lSpeed / examplesNum;
            // DERIVATIVE SUM FOR THE PREVIOUS-LAYER NEURON THAT FEEDS THIS INPUT
            backDerivatives[input] += coeffs[neuron][input] * neuronDeltas[neuron];
        }
        // BIAS DELTA SUM USES THE SAME SCALING BUT NO INPUT FACTOR
        biasDeltaSums[neuron] += neuronDeltas[neuron] * lSpeed / examplesNum;
    }
    outDerivatives = backDerivatives;
    return ("");
}