/// <summary>
/// Builds the per-class gradient for the cross-entropy loss, assuming labels
/// are uppercase letters ('A' == class 0). Only the expected class slot is
/// non-zero; it holds -1/p, the derivative of -log(p) w.r.t. the prediction.
/// </summary>
/// <param name="output">Predicted probabilities, one per class.</param>
/// <param name="c">Character label identifying the correct class.</param>
/// <returns>One single-value image per class.</returns>
private FlattenedImage[] GetCrossentropyLoss(double[] output, char c)
{
    // NOTE(review): 65 is ASCII 'A'; the commented alternative in the original
    // ('c - 48') suggests digit labels were also used at some point — confirm.
    int targetClass = (int)(c - 65);
    FlattenedImage[] gradients = new FlattenedImage[output.Length];

    for (int index = 0; index < output.Length; index++)
    {
        double value = (index == targetClass) ? -1.0 / output[index] : 0;
        gradients[index] = new FlattenedImage(1, new double[1] { value });
    }

    return gradients;
}
/// <summary>
/// Computes the per-class error signal used for backpropagation:
/// exp(expected[i]) / sum(expected) - output[i] for every class.
/// </summary>
/// <param name="output">Predicted values, one per class.</param>
/// <param name="expected">Expected (target) values, one per class.</param>
/// <param name="c">Character label; currently unused (kept for signature compatibility).</param>
/// <returns>One single-value image per class holding its error.</returns>
private FlattenedImage[] GetCrossentropyError(double[] output, double[] expected, char c)
{
    FlattenedImage[] result = new FlattenedImage[output.Length];
    double[] expectedExp = MatrixUtils.GetExp(expected);
    // NOTE(review): a softmax denominator would be expectedExp.Sum(); summing the
    // raw 'expected' values here looks suspicious, but training behavior depends
    // on it — confirm intent before changing.
    double expectedSum = expected.Sum();

    for (int i = 0; i < output.Length; i++)
    {
        result[i] = new FlattenedImage(1, new double[1] { expectedExp[i] / expectedSum - output[i] });
    }
    return result;
}
/// <summary>
/// Wires this layer to its predecessor and lazily allocates a 1-D output large
/// enough to hold every value of the incoming FilteredImage
/// (Size * Size * NumberOfChannels).
/// </summary>
/// <param name="previousLayer">The layer feeding into this one.</param>
public override void CompileLayer(NetworkLayer previousLayer)
{
    PreviousLayer = previousLayer;
    if (Output != null)
    {
        return; // already compiled
    }

    FilteredImage source = (FilteredImage)previousLayer.GetData();
    int flattenedLength = source.Size * source.Size * source.NumberOfChannels;
    Output = new FlattenedImage(flattenedLength);
}
/// <summary>
/// Applies the activation element-wise via ActivateValue and returns a new
/// image of the same size; the input is left untouched.
/// </summary>
/// <param name="img">Values to activate.</param>
/// <returns>A new image holding the activated values.</returns>
public override FlattenedImage Activate(FlattenedImage img)
{
    double[] activated = new double[img.Size];
    for (int index = 0; index < img.Size; index++)
    {
        activated[index] = ActivateValue(img.Values[index]);
    }
    return new FlattenedImage(img.Size, activated);
}
/// <summary>
/// Computes the element-wise derivative via GetValueDerivative and returns a
/// new image of the same size; the input is left untouched.
/// </summary>
/// <param name="output">Values to differentiate at.</param>
/// <returns>A new image holding the per-value derivatives.</returns>
public override FlattenedImage GetDerivative(FlattenedImage output)
{
    double[] derivatives = new double[output.Size];
    for (int index = 0; index < output.Size; index++)
    {
        derivatives[index] = GetValueDerivative(output.Values[index]);
    }
    return new FlattenedImage(output.Size, derivatives);
}
/// <summary>
/// Computes this unit's pre-activation value: the dot product of its weights
/// with the image values, plus the bias. The result is cached in Output.
/// </summary>
/// <param name="image">Input vector; must supply at least NumberOfWeights values.</param>
/// <returns>The computed output (same value stored in Output).</returns>
public double ComputeOutput(FlattenedImage image)
{
    double dotProduct = 0;
    for (int w = 0; w < NumberOfWeights; w++)
    {
        dotProduct += Weights[w] * image.Values[w];
    }
    // Bias is added last, matching the original accumulation order exactly.
    Output = dotProduct + Bias;
    return Output;
}
/// <summary>
/// Backpropagates through the softmax using the logits cached by Activate.
/// Implements the Jacobian row for the correct class:
/// d(p_c)/dz_c = p_c(1 - p_c) and d(p_c)/dz_i = -p_c * p_i for i != c,
/// each scaled by the upstream gradient.
/// </summary>
/// <param name="gradient">Single-value image holding dL/dp for the correct class.</param>
/// <param name="correctClass">Index of the true class.</param>
/// <returns>dL/dz for every softmax input.</returns>
public override FlattenedImage GetDerivative(FlattenedImage gradient, int correctClass)
{
    double correctClassGradient = gradient.Values[0];
    double[] result = new double[lastOutput.Size];
    double totalSum = 0;

    // Subtract the max logit before exponentiating: the ratios below are
    // mathematically invariant under the shift, and it prevents Math.Exp from
    // overflowing to Infinity for large logits.
    double maxx = lastOutput.Values.Max();
    for (int i = 0; i < lastOutput.Size; i++)
    {
        result[i] = Math.Exp(lastOutput.Values[i] - maxx);
        totalSum += result[i];
    }

    double squareSum = totalSum * totalSum;
    // BUG FIX: cache exp(z_correct) before the loop. result[] is overwritten in
    // place, so iterations with i > correctClass previously read the already-
    // scaled derivative instead of the raw exponential.
    double expCorrect = result[correctClass];
    for (int i = 0; i < result.Length; i++)
    {
        if (i == correctClass)
        {
            result[i] = correctClassGradient * ((expCorrect * (totalSum - expCorrect)) / squareSum);
        }
        else
        {
            result[i] = correctClassGradient * ((-expCorrect * result[i]) / squareSum);
        }
    }
    return new FlattenedImage(result.Length, result);
}
/// <summary>
/// Connects the dense layer to its predecessor, allocates its output vector,
/// and lazily creates one Unit per neuron sized to the previous layer's output.
/// </summary>
/// <param name="previousLayer">The layer feeding into this one.</param>
public override void CompileLayer(NetworkLayer previousLayer)
{
    PreviousLayer = previousLayer;
    FlattenedImage incoming = (FlattenedImage)PreviousLayer.GetData();
    Output = new FlattenedImage(NumberOfUnits);

    if (Units != null)
    {
        return; // units already created on a previous compile
    }

    Units = new Unit[NumberOfUnits];
    for (int unit = 0; unit < NumberOfUnits; unit++)
    {
        Units[unit] = new Unit(incoming.Size);
    }
}
/// <summary>
/// Computes the per-output squared error, 0.5 * (actual - expected)^2, each
/// wrapped in a single-value FlattenedImage.
/// </summary>
/// <param name="actualOutput">Values the network produced.</param>
/// <param name="expectedOutput">Target values; same length as actualOutput.</param>
/// <returns>One single-value image per output holding its error.</returns>
private FlattenedImage[] GetErrorArray(double[] actualOutput, double[] expectedOutput)
{
    FlattenedImage[] errors = new FlattenedImage[actualOutput.Length];
    for (int i = 0; i < actualOutput.Length; i++)
    {
        double diff = actualOutput[i] - expectedOutput[i];
        errors[i] = new FlattenedImage(1, new double[1] { 1.0 / 2.0 * (diff * diff) });
    }
    return errors;
}
/// <summary>
/// Lazily allocates Output with the same shape as the previous layer's data:
/// a flat vector for FlattenedImage input, or a multi-channel image otherwise.
/// </summary>
private void InitializeOutput()
{
    if (Output != null)
    {
        return; // already allocated
    }

    LayerOutput previousData = PreviousLayer.GetData();
    if (previousData is FlattenedImage)
    {
        FlattenedImage flat = (FlattenedImage)previousData;
        Output = new FlattenedImage(flat.Size);
    }
    else
    {
        FilteredImage filtered = (FilteredImage)previousData;
        Output = new FilteredImage(filtered.NumberOfChannels, filtered.Size);
    }
}
/// <summary>
/// Evaluates every unit against the previous layer's output in parallel (one
/// task per unit), then applies the activation function to the collected
/// pre-activation values.
/// </summary>
public override void ComputeOutput()
{
    FlattenedImage input = (FlattenedImage)PreviousLayer.GetData();
    Task[] unitTasks = new Task[NumberOfUnits];

    for (int unit = 0; unit < NumberOfUnits; unit++)
    {
        // Copy the loop variable so each lambda captures its own index.
        int u = unit;
        unitTasks[u] = Task.Run(() => Output.Values[u] = Units[u].ComputeOutput(input));
    }

    Task.WaitAll(unitTasks);
    Output = ActivationFunction.Activate(Output);
}
/// <summary>
/// Dropout over a flat input: each value is zeroed with probability Rate and
/// passed through unchanged otherwise. A new output image is allocated; the
/// input is not modified.
/// </summary>
private void ComputeFlattenedImage()
{
    FlattenedImage input = (FlattenedImage)PreviousLayer.GetData();
    double[] masked = new double[input.Size];

    for (int i = 0; i < input.Size; i++)
    {
        bool drop = GlobalRandom.GetRandomDouble() < Rate;
        masked[i] = drop ? 0 : input.Values[i];
    }

    Output = new FlattenedImage(input.Size, masked);
}
/// <summary>
/// Softmax activation: exponentiates every value and normalizes by the sum,
/// producing a probability distribution. The raw input is cached in lastOutput
/// for use by GetDerivative during backpropagation.
/// </summary>
/// <param name="img">Input logits.</param>
/// <returns>A new image holding the softmax probabilities.</returns>
public override FlattenedImage Activate(FlattenedImage img)
{
    // Cache a copy of the raw logits; GetDerivative re-reads them.
    lastOutput = new FlattenedImage(img.Size, CopyArray(img.Values));

    double sum = 0;
    double[] result = new double[img.Size];
    // Shift by the max logit before exponentiating. Softmax is mathematically
    // invariant under the shift, and it prevents Math.Exp from overflowing to
    // Infinity (which would turn every probability into NaN).
    double maxx = img.Values.Max();
    for (int i = 0; i < img.Size; i++)
    {
        result[i] = Math.Exp(img.Values[i] - maxx);
        sum += result[i];
    }
    for (int i = 0; i < img.Size; i++)
    {
        result[i] /= sum;
    }
    return new FlattenedImage(img.Size, result);
}
/// <summary>
/// Class-aware derivative hook for activations (such as softmax) that need the
/// index of the correct class. The base implementation is not supported;
/// derived activation functions must override it.
/// </summary>
/// <param name="output">Activation output to differentiate at.</param>
/// <param name="correctClass">Index of the true class.</param>
/// <exception cref="NotSupportedException">Always, unless overridden.</exception>
public virtual FlattenedImage GetDerivative(FlattenedImage output, int correctClass)
{
    // NotSupportedException (instead of bare Exception) names the failure;
    // it still derives from Exception, so existing catch blocks keep working.
    throw new NotSupportedException("This activation function does not provide a class-aware derivative.");
}
/// <summary>
/// Element-wise derivative hook for activation functions. The base
/// implementation is not supported; derived activation functions must
/// override it.
/// </summary>
/// <param name="output">Activation output to differentiate at.</param>
/// <exception cref="NotSupportedException">Always, unless overridden.</exception>
public virtual FlattenedImage GetDerivative(FlattenedImage output)
{
    // NotSupportedException (instead of bare Exception) names the failure;
    // it still derives from Exception, so existing catch blocks keep working.
    throw new NotSupportedException("This activation function does not provide an element-wise derivative.");
}
/// <summary>Applies the activation function to every value of the image.</summary>
/// <param name="img">Input values to transform.</param>
/// <returns>A new image of the same size holding the activated values.</returns>
public abstract FlattenedImage Activate(FlattenedImage img);
/// <summary>
/// Backpropagates through the dense layer: in parallel (one task per unit),
/// computes the error signal to forward to the previous layer and applies a
/// gradient-descent update to each unit's weights in place.
/// </summary>
/// <param name="nextOutput">Per-unit error signals coming from the next layer.</param>
/// <param name="learningRate">Step size for the weight updates.</param>
/// <returns>
/// One FlattenedImage per input weight; entry [weight].Values[unit] holds that
/// unit's contribution to the error for the given input position.
/// </returns>
public override LayerOutput[] Backpropagate(LayerOutput[] nextOutput, double learningRate)
{
    int weightsPerUnit = Units[0].NumberOfWeights;
    FlattenedImage[] result = new FlattenedImage[weightsPerUnit];
    FlattenedImage previous = (FlattenedImage)PreviousLayer.GetData();
    for (int i = 0; i < weightsPerUnit; i++)
    {
        result[i] = new FlattenedImage(NumberOfUnits);
    }

    FlattenedImage activationDerivative = ActivationFunction.GetDerivative(Output);
    Task[] tasks = new Task[NumberOfUnits];
    for (int unit = 0; unit < NumberOfUnits; unit++)
    {
        int tasku = unit; // per-task copy of the loop variable for the closure
        tasks[tasku] = Task.Run(() =>
        {
            Unit unitAux = Units[tasku];
            FlattenedImage nextErrors = (FlattenedImage)nextOutput[tasku];
            double unitSum = nextErrors.Values.Sum();
            double unitDerivative = unitSum * activationDerivative.Values[tasku];
            for (int weight = 0; weight < unitAux.NumberOfWeights; weight++)
            {
                // lock statement instead of bare Monitor.Enter/Exit: an exception
                // between Enter and Exit previously left the monitor held forever.
                // (Each task writes a distinct index tasku, so contention is brief.)
                lock (result)
                {
                    result[weight].Values[tasku] = unitDerivative * unitAux.Weights[weight];
                }
                double deltaW = unitDerivative * previous.Values[weight];
                unitAux.Weights[weight] -= learningRate * deltaW;
            }
        });
    }
    Task.WaitAll(tasks);
    return result;
}
/// <summary>
/// Creates a flatten layer, optionally seeded with a precomputed output image
/// (null by default; allocated later during CompileLayer).
/// </summary>
/// <param name="output">Optional pre-existing output to reuse.</param>
public FlattenLayer(FlattenedImage output = null) : base("Flatten") => Output = output;
/// <summary>
/// Returns the raw value array produced by the network's final layer, which is
/// expected to expose a FlattenedImage.
/// </summary>
/// <returns>The final layer's output values.</returns>
private double[] GetOutput()
{
    int lastIndex = NetworkLayers.Count - 1;
    FlattenedImage finalData = (FlattenedImage)NetworkLayers[lastIndex].GetData();
    return finalData.Values;
}