/// <summary>
/// Computes the per-unit error term used for backpropagation under a
/// softmax/cross-entropy formulation: softmax(expected)[i] - output[i],
/// wrapped one value per <see cref="FlattenedImage"/>.
/// </summary>
/// <param name="output">The network's output activations.</param>
/// <param name="expected">The target vector; passed through a softmax before differencing.</param>
/// <param name="c">Unused by the live code path; kept for interface compatibility
/// (the removed dead code indexed the target class as <c>c - 65</c>).</param>
/// <returns>One single-element <see cref="FlattenedImage"/> per output unit.</returns>
private FlattenedImage[] GetCrossentropyError(double[] output, double[] expected, char c)
{
    FlattenedImage[] result = new FlattenedImage[output.Length];

    // Softmax of the expected vector: exp(expected[i]) / sum_j exp(expected[j]).
    // BUG FIX: the normalizer must be the sum of the EXPONENTIALS, not the sum of
    // the raw expected values — the original divided by expected.Sum(), which does
    // not produce a softmax and mis-scales (or sign-flips, for negative sums) the
    // resulting gradient.
    double[] expectedExp = MatrixUtils.GetExp(expected);
    double expectedSum = expectedExp.Sum();

    for (int i = 0; i < output.Length; i++)
    {
        result[i] = new FlattenedImage(1, new double[1] { expectedExp[i] / expectedSum - output[i] });
    }

    return result;
}
/// <summary>
/// Sums every element across all channels of this image by accumulating
/// <c>MatrixUtils.ElementSum</c> over each channel's values.
/// </summary>
/// <returns>The total of all element values in all channels.</returns>
public override double Sum() =>
    Enumerable.Range(0, NumberOfChannels)
              .Sum(channel => MatrixUtils.ElementSum(Channels[channel].Values));
/// <summary>
/// Convolves this kernel (<c>Values</c>) over the given input channel.
/// </summary>
/// <param name="input">The channel whose values are convolved with this kernel.</param>
/// <param name="samePadding">
/// When <c>true</c>, uses "same" convolution (<c>MatrixUtils.ConvolveSame</c>);
/// otherwise uses the plain <c>MatrixUtils.Convolve</c>.
/// </param>
/// <returns>The 2D convolution result.</returns>
public double[,] Convolve(FilteredImageChannel input, bool samePadding)
{
    return samePadding
        ? MatrixUtils.ConvolveSame(input.Values, Values)
        : MatrixUtils.Convolve(input.Values, Values);
}
/// <summary>
/// Backpropagates through this kernel channel: computes the weight gradient from
/// the forward input and the incoming errors, applies a gradient-descent step to
/// <c>Values</c>, and returns the errors to propagate to the earlier layer.
/// </summary>
/// <param name="previous">The forward-pass input to this kernel.</param>
/// <param name="nextErrors">The error signal arriving from the next layer.</param>
/// <param name="totalKernels">Unused by the live code path; kept for interface compatibility.</param>
/// <param name="learningRate">Step size for the weight update.</param>
/// <param name="samePadding">Selects the "same"-padded convolution variants.</param>
/// <returns>A new channel holding the propagated errors.</returns>
public FilteredImageChannel Backpropagate(FilteredImageChannel previous, FilteredImageChannel nextErrors, int totalKernels, double learningRate, bool samePadding)
{
    // Weight gradient: convolution of the forward input with the incoming errors,
    // computed concurrently with the error propagation below.
    double[,] weightGradient = new double[1, 1];
    Task gradientTask = Task.Run(() =>
    {
        weightGradient = samePadding
            ? MatrixUtils.ConvolveSame(previous.Values, nextErrors.Values)
            : MatrixUtils.Convolve(previous.Values, nextErrors.Values);
    });

    // Errors for the earlier layer.
    // NOTE(review): the commented-out original flipped the kernel (Rotate180) before
    // this convolution and used the Full*Same variant for same-padding; the live code
    // uses the un-flipped kernel and plain Convolve — confirm this is intended.
    double[,] propagatedErrors = samePadding
        ? MatrixUtils.Convolve(Values, nextErrors.Values)
        : MatrixUtils.FullConvolution(Values, nextErrors.Values);

    // Ensure the gradient is ready before it is read.
    gradientTask.Wait();

    // Gradient-descent step on the kernel weights.
    double[,] updatedWeights = new double[Size, Size];
    for (int row = 0; row < Size; row++)
    {
        for (int col = 0; col < Size; col++)
        {
            updatedWeights[row, col] = Values[row, col] - learningRate * weightGradient[row, col];
        }
    }
    Values = updatedWeights;

    return new FilteredImageChannel(propagatedErrors.GetLength(0), propagatedErrors);
}