/// <summary>
/// Applies non-overlapping 2x2 max pooling to each input feature map,
/// halving both dimensions.
/// </summary>
/// <param name="inputImages">Feature maps to pool; each must have even row and column counts.</param>
/// <returns>One pooled map per input, each (Row/2) x (Column/2).</returns>
/// <exception cref="ArgumentException">Thrown when a map has an odd dimension.</exception>
private List<Mat<double>> ApplyPooling(List<Mat<double>> inputImages)
{
    List<Mat<double>> pooledImages = new List<Mat<double>>();

    foreach (Mat<double> inputImage in inputImages)
    {
        // 2x2 pooling requires even dimensions.
        if ((inputImage.Row & 1) != 0 || (inputImage.Column & 1) != 0)
        {
            // More specific than the original bare Exception; still caught by
            // callers that catch Exception.
            throw new ArgumentException("Pooling input dimensions must be even.", nameof(inputImages));
        }

        int poolHeight = inputImage.Row >> 1;
        int poolWidth = inputImage.Column >> 1;
        Mat<double> pooledImage = new Mat<double>(poolHeight, poolWidth);

        // BUG FIX: the original scanned with stride 1 (y++, x++) while writing to
        // pooledImage[y/2, x/2], so each pooled cell was overwritten by later
        // overlapping windows and ended up holding the max of the LAST window that
        // mapped to it, not the max of its own 2x2 block. Stepping by 2 makes each
        // pooled cell the max of its disjoint 2x2 window, matching the output
        // dimensions (Row/2 x Column/2) the method already allocates.
        for (int y = 0; y < inputImage.Row; y += 2)
        {
            for (int x = 0; x < inputImage.Column; x += 2)
            {
                // Direct max of the four window elements; no need to sort.
                double max = inputImage.Element[y, x];
                max = Math.Max(max, inputImage.Element[y, x + 1]);
                max = Math.Max(max, inputImage.Element[y + 1, x]);
                max = Math.Max(max, inputImage.Element[y + 1, x + 1]);
                pooledImage.Element[y / 2, x / 2] = max;
            }
        }

        pooledImages.Add(pooledImage);
    }

    return pooledImages;
}
/// <summary>
/// Runs one training step for a single image: a forward pass through the
/// convolution/pooling stack and the fully connected network (ReLU hidden
/// layers with dropout, sigmoid output), then backpropagation through the
/// FC weights and the convolution kernels. Returns the summed MSE error.
/// </summary>
/// <param name="image">Input image; must match the configured imageSize.</param>
/// <param name="targetData">Expected output vector for the MSE loss.</param>
/// <returns>Sum of the per-output MSE error terms for this sample.</returns>
/// <exception cref="Exception">Thrown when the image dimensions do not match imageSize.</exception>
public double Train(Mat<double> image, double[] targetData)
{
    if ((image.Row != imageSize.height) || (image.Column != imageSize.width))
    {
        throw new Exception();
    }

    int convLevel = 0;
    int outputLevel = 0; // NOTE(review): incremented per layer but never read.

    // ---- Forward pass: convolution / pooling stack ----
    // convLayersOutput caches every layer's output (index 0 = raw input) so the
    // backward pass below can pair each layer's input with its output.
    List<Mat<double>> inputImages = new List<Mat<double>>();
    inputImages.Add(image);
    convLayersOutput.Clear();
    convLayersOutput.Add(inputImages);
    foreach (ECNNLayer layer in layers)
    {
        switch (layer)
        {
            case ECNNLayer.ConvLayer:
                inputImages = ApplyConvolution(inputImages, kernels[convLevel], strides[convLevel]);
                inputImages = ApplyActivations(inputImages, EActivation.ReLU);
                convLayersOutput.Add(inputImages);
                convLevel++;
                break;

            case ECNNLayer.PoolingLayer:
                inputImages = ApplyPooling(inputImages);
                convLayersOutput.Add(inputImages);
                break;
        }
        outputLevel++;
    }

    // ---- Flatten feature maps into the FC input vector ----
    fcLayersOutput.Clear();
    int inputNodesCount = layersNodes[0];
    double[] inputNodes = new double[inputNodesCount];
    foreach (Mat<double> inputImage in inputImages)
    {
        double[] arr = Mat<double>.ConvertToArr(inputImage);
        // NOTE(review): every feature map is copied to offset 0, so each
        // iteration overwrites the previous one and only the last map survives.
        // Presumably an accumulating offset was intended (layersNodes[0] sizing
        // suggests room for all maps) — confirm before changing; Query uses the
        // same pattern, so both must be changed together.
        Array.Copy(arr, inputNodes, arr.Length);
    }
    Mat<double> nodeMat = new Mat<double>(1, inputNodesCount, inputNodes);
    fcLayersOutput.Add(nodeMat);

    // ---- Forward pass: fully connected hidden layers ----
    int layerDepth = 0;
    double ratio = 0.85; // dropout ratio passed through to Dropout (exact semantics defined there)
    for (; layerDepth < fcDepth - 2; layerDepth++)
    {
        Mat<double> netMat = Mat<double>.Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);
        AddBias(netMat, biasWeights[layerDepth]);
        Dropout(netMat, ratio); // training-only regularization; Query skips this
        ApplyActivation(netMat, EActivation.ReLU);
        fcLayersOutput.Add(netMat);
    }

    // Output layer: sigmoid activation, no dropout.
    Mat<double> outMat = Mat<double>.Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);
    AddBias(outMat, biasWeights[layerDepth]);
    ApplyActivation(outMat, EActivation.Sigmoid);
    fcLayersOutput.Add(outMat);

    // ---- Loss ----
    double[] output = Mat<double>.ConvertToArr(outMat);
    double[] errors = MSE(output, targetData);
    double errorSum = 0;
    foreach (double val in errors)
    {
        errorSum += val;
    }

    // ---- Backward pass: fully connected layers, output toward input ----
    // For step i, outputMat/inputMat are the cached activations on either side
    // of the weight matrix being updated; the final argument flags the output
    // layer (i == 0) so UpdateWeight can use the sigmoid derivative there.
    for (int i = 0; i < fcDepth - 1; i++)
    {
        double[] outputMat = Mat<double>.ConvertToArr(fcLayersOutput[fcLayersOutput.Count - 1 - i]);
        double[] inputMat = Mat<double>.ConvertToArr(fcLayersOutput[fcLayersOutput.Count - 1 - i - 1]);
        errors = UpdateWeight(layersNodes[fcDepth - i - 2], layersNodes[fcDepth - i - 1], errors, outputMat, inputMat, nodeWeights[fcDepth - i - 2], biasWeights[fcDepth - i - 2], i == 0 ? true : false);
    }

    // ---- Backward pass: convolution / pooling stack, last layer first ----
    // The FC-layer error vector is reshaped back into per-map gradient matrices.
    List<List<Mat<double>>> convGradients = new List<List<Mat<double>>>();
    List<Mat<double>> errorMats = Mat<double>.ConvertToMats(errors, kernelLen);
    for (int i = convLayersOutput.Count - 1; i > 0; i--)
    {
        switch (layers[i - 1])
        {
            case ECNNLayer.ConvLayer:
                // Is index i the output and i - 1 the input? (original author's note)
                convGradients.Add(errorMats);
                convLevel--;
                UpdateKernel(errorMats, convLayersOutput[i], convLayersOutput[i - 1], convLevel);
                errorMats = PropagateConvGradient(errorMats, convLayersOutput[i], convLevel);
                break;

            case ECNNLayer.PoolingLayer:
                errorMats = PropagatePoolGradient(errorMats, convLayersOutput[i], convLayersOutput[i - 1]);
                break;
        }
    }

    return(errorSum);
}
/// <summary>
/// Runs a forward pass (inference) for a single image and returns the network's
/// output vector. Same pipeline as Train's forward pass, but without dropout and
/// without caching the convolution-layer outputs for backpropagation.
/// </summary>
/// <param name="image">Input image; must match the configured imageSize.</param>
/// <returns>Sigmoid-activated output-layer values.</returns>
/// <exception cref="ArgumentException">Thrown when the image dimensions do not match imageSize.</exception>
public double[] Query(Mat<double> image)
{
    if ((image.Row != imageSize.height) || (image.Column != imageSize.width))
    {
        // More specific than the original bare Exception; still caught by
        // callers that catch Exception.
        throw new ArgumentException("Image dimensions do not match the configured imageSize.", nameof(image));
    }

    // ---- Convolution / pooling stack ----
    // (The unused outputLevel counter from the original has been removed.)
    int convLevel = 0;
    List<Mat<double>> inputImages = new List<Mat<double>>();
    inputImages.Add(image);
    foreach (ECNNLayer layer in layers)
    {
        switch (layer)
        {
            case ECNNLayer.ConvLayer:
                inputImages = ApplyConvolution(inputImages, kernels[convLevel], strides[convLevel]);
                inputImages = ApplyActivations(inputImages, EActivation.ReLU);
                convLevel++;
                break;

            case ECNNLayer.PoolingLayer:
                inputImages = ApplyPooling(inputImages);
                break;
        }
    }

    // ---- Flatten feature maps into the FC input vector ----
    fcLayersOutput.Clear(); // reuses the shared buffer also written by Train
    int inputNodesCount = layersNodes[0];
    double[] inputNodes = new double[inputNodesCount];
    foreach (Mat<double> inputImage in inputImages)
    {
        double[] arr = Mat<double>.ConvertToArr(inputImage);
        // NOTE(review): every feature map is copied to offset 0, so only the
        // last map survives. Train uses the identical pattern, so the trained
        // weights match this behavior — if an accumulating offset was intended,
        // both methods must be changed together; left as-is here.
        Array.Copy(arr, inputNodes, arr.Length);
    }
    Mat<double> nodeMat = new Mat<double>(1, inputNodesCount, inputNodes);
    fcLayersOutput.Add(nodeMat);

    // ---- Fully connected layers: ReLU hidden layers, sigmoid output ----
    int layerDepth = 0;
    for (; layerDepth < fcDepth - 2; layerDepth++)
    {
        Mat<double> netMat = Mat<double>.Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);
        AddBias(netMat, biasWeights[layerDepth]);
        ApplyActivation(netMat, EActivation.ReLU);
        fcLayersOutput.Add(netMat);
    }

    Mat<double> outMat = Mat<double>.Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);
    AddBias(outMat, biasWeights[layerDepth]);
    ApplyActivation(outMat, EActivation.Sigmoid);
    fcLayersOutput.Add(outMat);

    return Mat<double>.ConvertToArr(outMat);
}