public void GetSoftMaxLogOutput()
{
    // Arrange: a 2x2 input. Each column is normalized independently, as the
    // expected log-norm terms below show (column 0 holds {1, 0}, column 1 {0, 2}).
    Matrix<double> input = Matrix<double>.Build.DenseOfArray(
        new double[,]
        {
            { 1, 0 },
            { 0, 2 }
        });

    // Act
    Matrix<double> actual = SoftMax.GetSoftMaxLogOutput(input);

    // Assert: log-softmax is x[i,j] - log(sum over the column of exp(x)).
    double logNormCol0 = Math.Log(Math.Exp(1) + Math.Exp(0));
    double logNormCol1 = Math.Log(Math.Exp(0) + Math.Exp(2));
    Matrix<double> expected = Matrix<double>.Build.DenseOfArray(
        new double[,]
        {
            { 1 - logNormCol0, 0 - logNormCol1 },
            { 0 - logNormCol0, 2 - logNormCol1 }
        });

    for (int row = 0; row < actual.RowCount; row++)
    {
        for (int col = 0; col < actual.ColumnCount; col++)
        {
            Assert.AreEqual(expected[row, col], actual[row, col], 0.00000001);
        }
    }
}
/// <summary>
/// Maps a concrete <see cref="IConduction"/> implementation to its numeric
/// encoding id (the inverse of whatever decodes these tags elsewhere).
/// </summary>
/// <param name="conduction">The conduction instance whose runtime type is encoded.</param>
/// <returns>The id registered for the runtime type: Sigmoind=1, ReLU=2, SoftReLU=3, Straight=4, SoftMax=5.</returns>
/// <exception cref="ArgumentException">Thrown when the runtime type has no registered id.</exception>
public static ushort EnCodeIConduction(IConduction conduction)
{
    return conduction switch
    {
        Sigmoind _ => 1,
        ReLU _ => 2,
        SoftReLU _ => 3,
        Straight _ => 4,
        SoftMax _ => 5,
        // BUG FIX: ArgumentException takes (message, paramName) — the original
        // passed them reversed (unlike ArgumentNullException, whose paramName
        // comes first), so the exception's message was just "conduction".
        _ => throw new ArgumentException(
            $"this type of IConduction is not registered. {conduction}",
            nameof(conduction)),
    };
}
/// <summary>
/// Accumulates the weighted sum of inbound signals into each node of
/// <paramref name="layer"/> and applies the selected activation.
/// Sigmoid/HyperTan are applied per node as its sum is computed; SoftMax is
/// applied across the entire layer after all sums are accumulated.
/// Only layers of <c>OutputNode</c> or <c>HiddenNode</c> are processed; any
/// other element type is silently ignored.
/// </summary>
/// <param name="layer">The layer whose nodes are summed and activated in place.</param>
/// <param name="activationAlgorithm">Which activation to apply.</param>
private void calculateActivatedSumsForLayer<T>(ref IList<T> layer, ActivationAlgorithm activationAlgorithm)
{
    if (typeof(T) == typeof(OutputNode))
    {
        foreach (OutputNode node in (List<OutputNode>)layer)
        {
            // Accumulate weighted inputs. NOTE(review): Val is incremented,
            // not assigned — presumably it starts at 0 before this call; confirm against caller.
            foreach (Connector inbound in node.InboundConnectors)
            {
                node.Val += inbound.Weight * inbound.FromNode.Val;
            }

            if (activationAlgorithm == ActivationAlgorithm.Sigmoid)
            {
                IActivationFormulaDelegate activation = new Sigmoid();
                node.Val = activation.applyActivation(node.Val);
            }
            else if (activationAlgorithm == ActivationAlgorithm.HyperTan)
            {
                IActivationFormulaDelegate activation = new HyperTan();
                node.Val = activation.applyActivation(node.Val);
            }
        }

        if (activationAlgorithm == ActivationAlgorithm.SoftMax)
        {
            // SoftMax needs the whole layer at once: gather, activate, scatter.
            double[] sums = new double[layer.Count];
            int position = 0;
            foreach (OutputNode node in (List<OutputNode>)layer)
            {
                sums[position] = node.Val;
                position++;
            }

            IActivationFormulaDelegate activation = new SoftMax();
            sums = activation.applyActivation(sums);

            position = 0;
            foreach (OutputNode node in (List<OutputNode>)layer)
            {
                node.Val = sums[position];
                position++;
            }
        }
    }
    else if (typeof(T) == typeof(HiddenNode))
    {
        foreach (HiddenNode node in (List<HiddenNode>)layer)
        {
            // Same accumulation as the OutputNode branch above.
            foreach (Connector inbound in node.InboundConnectors)
            {
                node.Val += inbound.Weight * inbound.FromNode.Val;
            }

            if (activationAlgorithm == ActivationAlgorithm.Sigmoid)
            {
                IActivationFormulaDelegate activation = new Sigmoid();
                node.Val = activation.applyActivation(node.Val);
            }
            else if (activationAlgorithm == ActivationAlgorithm.HyperTan)
            {
                IActivationFormulaDelegate activation = new HyperTan();
                node.Val = activation.applyActivation(node.Val);
            }
        }

        if (activationAlgorithm == ActivationAlgorithm.SoftMax)
        {
            double[] sums = new double[layer.Count];
            int position = 0;
            foreach (HiddenNode node in (List<HiddenNode>)layer)
            {
                sums[position] = node.Val;
                position++;
            }

            IActivationFormulaDelegate activation = new SoftMax();
            sums = activation.applyActivation(sums);

            position = 0;
            foreach (HiddenNode node in (List<HiddenNode>)layer)
            {
                node.Val = sums[position];
                position++;
            }
        }
    }
}