Example #1
0
        /// <summary>
        /// Runs one full training step: forward pass through the convolution/pooling
        /// stack and the fully-connected layers, then backpropagation updating both
        /// the FC weights and the convolution kernels.
        /// </summary>
        /// <param name="image">Input image; must match <c>imageSize</c> (height x width).</param>
        /// <param name="targetData">Expected output vector, compared element-wise via MSE.</param>
        /// <returns>The sum of the per-output MSE errors for this sample.</returns>
        /// <exception cref="ArgumentException">Thrown when the image dimensions do not match <c>imageSize</c>.</exception>
        public double Train(Mat <double> image, double[] targetData)
        {
            if ((image.Row != imageSize.height) ||
                (image.Column != imageSize.width))
            {
                // Specific exception type with a message instead of a bare Exception();
                // ArgumentException derives from Exception, so existing catch blocks still work.
                throw new ArgumentException(
                    $"Image size mismatch: expected {imageSize.height}x{imageSize.width}, got {image.Row}x{image.Column}.",
                    nameof(image));
            }

            int convLevel = 0;

            List <Mat <double> > inputImages = new List <Mat <double> >();

            inputImages.Add(image);

            // Record every intermediate feature-map set; needed later for backprop.
            convLayersOutput.Clear();
            convLayersOutput.Add(inputImages);

            // Forward pass through the convolution / pooling stack.
            foreach (ECNNLayer layer in layers)
            {
                switch (layer)
                {
                case ECNNLayer.ConvLayer:
                    inputImages = ApplyConvolution(inputImages, kernels[convLevel], strides[convLevel]);
                    inputImages = ApplyActivations(inputImages, EActivation.ReLU);
                    convLayersOutput.Add(inputImages);
                    convLevel++;
                    break;

                case ECNNLayer.PoolingLayer:
                    inputImages = ApplyPooling(inputImages);
                    convLayersOutput.Add(inputImages);
                    break;
                }
            }

            fcLayersOutput.Clear();

            int inputNodesCount = layersNodes[0];

            double[] inputNodes = new double[inputNodesCount];

            // Flatten all feature maps into the FC input vector.
            // BUGFIX: the original copied every map to index 0, so with more than one
            // feature map all but the last were overwritten; concatenate at an offset.
            int flattenOffset = 0;

            foreach (Mat <double> inputImage in inputImages)
            {
                double[] arr = Mat <double> .ConvertToArr(inputImage);

                Array.Copy(arr, 0, inputNodes, flattenOffset, arr.Length);
                flattenOffset += arr.Length;
            }

            Mat <double> nodeMat = new Mat <double>(1, inputNodesCount, inputNodes);

            fcLayersOutput.Add(nodeMat);

            int    layerDepth = 0;
            double ratio      = 0.85;   // dropout keep/drop ratio — semantics depend on Dropout(); confirm scaling at inference

            // Hidden FC layers: affine -> dropout -> ReLU.
            for (; layerDepth < fcDepth - 2; layerDepth++)
            {
                Mat <double> netMat = Mat <double> .Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);

                AddBias(netMat, biasWeights[layerDepth]);

                Dropout(netMat, ratio);

                ApplyActivation(netMat, EActivation.ReLU);
                fcLayersOutput.Add(netMat);
            }

            // Output layer: affine -> Sigmoid (no dropout).
            Mat <double> outMat = Mat <double> .Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);

            AddBias(outMat, biasWeights[layerDepth]);

            ApplyActivation(outMat, EActivation.Sigmoid);
            fcLayersOutput.Add(outMat);

            double[] output = Mat <double> .ConvertToArr(outMat);

            double[] errors = MSE(output, targetData);

            double errorSum = 0;

            foreach (double val in errors)
            {
                errorSum += val;
            }

            // Backpropagate through the FC layers, last to first.
            for (int i = 0; i < fcDepth - 1; i++)
            {
                double[] outputMat = Mat <double> .ConvertToArr(fcLayersOutput[fcLayersOutput.Count - 1 - i]);

                double[] inputMat = Mat <double> .ConvertToArr(fcLayersOutput[fcLayersOutput.Count - 1 - i - 1]);

                errors = UpdateWeight(layersNodes[fcDepth - i - 2], layersNodes[fcDepth - i - 1],
                                      errors,
                                      outputMat,
                                      inputMat,
                                      nodeWeights[fcDepth - i - 2], biasWeights[fcDepth - i - 2],
                                      i == 0);   // true only for the output (sigmoid) layer
            }

            List <List <Mat <double> > > convGradients = new List <List <Mat <double> > >();
            List <Mat <double> >         errorMats     = Mat <double> .ConvertToMats(errors, kernelLen);

            // Backpropagate through the conv/pooling stack, last to first.
            for (int i = convLayersOutput.Count - 1; i > 0; i--)
            {
                switch (layers[i - 1])
                {
                case ECNNLayer.ConvLayer:
                    // NOTE(review): convLayersOutput[i] appears to be this layer's output
                    // and [i - 1] its input — original comment was a question; confirm.
                    convGradients.Add(errorMats);
                    convLevel--;
                    UpdateKernel(errorMats, convLayersOutput[i], convLayersOutput[i - 1], convLevel);
                    errorMats = PropagateConvGradient(errorMats, convLayersOutput[i], convLevel);
                    break;

                case ECNNLayer.PoolingLayer:
                    errorMats = PropagatePoolGradient(errorMats, convLayersOutput[i], convLayersOutput[i - 1]);
                    break;
                }
            }

            return(errorSum);
        }
Example #2
0
        /// <summary>
        /// Runs a forward-only (inference) pass: convolution/pooling stack, then the
        /// fully-connected layers. No weights are modified and no dropout is applied.
        /// </summary>
        /// <param name="image">Input image; must match <c>imageSize</c> (height x width).</param>
        /// <returns>The network's output vector after the final sigmoid activation.</returns>
        /// <exception cref="ArgumentException">Thrown when the image dimensions do not match <c>imageSize</c>.</exception>
        public double[] Query(Mat <double> image)
        {
            if ((image.Row != imageSize.height) ||
                (image.Column != imageSize.width))
            {
                // Specific exception type with a message instead of a bare Exception();
                // ArgumentException derives from Exception, so existing catch blocks still work.
                throw new ArgumentException(
                    $"Image size mismatch: expected {imageSize.height}x{imageSize.width}, got {image.Row}x{image.Column}.",
                    nameof(image));
            }

            int convLevel = 0;

            List <Mat <double> > inputImages = new List <Mat <double> >();

            inputImages.Add(image);

            // Forward pass through the convolution / pooling stack.
            foreach (ECNNLayer layer in layers)
            {
                switch (layer)
                {
                case ECNNLayer.ConvLayer:
                    inputImages = ApplyConvolution(inputImages, kernels[convLevel], strides[convLevel]);
                    inputImages = ApplyActivations(inputImages, EActivation.ReLU);
                    convLevel++;
                    break;

                case ECNNLayer.PoolingLayer:
                    inputImages = ApplyPooling(inputImages);
                    break;
                }
            }

            fcLayersOutput.Clear();

            int inputNodesCount = layersNodes[0];

            double[] inputNodes = new double[inputNodesCount];

            // Flatten all feature maps into the FC input vector.
            // BUGFIX: the original copied every map to index 0, so with more than one
            // feature map all but the last were overwritten; concatenate at an offset.
            int flattenOffset = 0;

            foreach (Mat <double> inputImage in inputImages)
            {
                double[] arr = Mat <double> .ConvertToArr(inputImage);

                Array.Copy(arr, 0, inputNodes, flattenOffset, arr.Length);
                flattenOffset += arr.Length;
            }

            Mat <double> nodeMat = new Mat <double>(1, inputNodesCount, inputNodes);

            fcLayersOutput.Add(nodeMat);

            int layerDepth = 0;

            // Hidden FC layers: affine -> ReLU.
            for (; layerDepth < fcDepth - 2; layerDepth++)
            {
                Mat <double> netMat = Mat <double> .Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);

                AddBias(netMat, biasWeights[layerDepth]);

                ApplyActivation(netMat, EActivation.ReLU);
                fcLayersOutput.Add(netMat);
            }

            // Output layer: affine -> Sigmoid.
            Mat <double> outMat = Mat <double> .Mul(fcLayersOutput[layerDepth], nodeWeights[layerDepth]);

            AddBias(outMat, biasWeights[layerDepth]);

            ApplyActivation(outMat, EActivation.Sigmoid);
            fcLayersOutput.Add(outMat);

            double[] output = Mat <double> .ConvertToArr(outMat);

            return(output);
        }