Example #1
        // Three possibilities:
        // 2D -> 2D
        // 2D -> 1D
        // 1D -> 1D
        public NeuronMatrix(Layer l1, Layer l2)
        {
            // 1D -> 2D is the one invalid combination
            Debug.Assert(!(l1.type == Layer.LAYER_1D && l2.type == Layer.LAYER_2D));

            if (l1 is Layer2D && l2 is Layer2D)
            {
                Layer2D prev = (Layer2D)l1;
                Layer2D curr = (Layer2D)l2;
                weight = new double[prev.depth, curr.depth, curr.kernelWidth, curr.kernelHeight];
                bias   = new double[curr.depth];
                initializeWeights(prev.depth * curr.kernelWidth * curr.kernelHeight);
            }
            else if (l1 is Layer2D && l2 is Layer1D)
            {
                Layer2D prev = (Layer2D)l1;
                Layer1D curr = (Layer1D)l2;
                weight = new double[prev.depth, prev.width, prev.height, curr.size];
                bias   = new double[curr.size];
                initializeWeights(prev.depth * prev.width * prev.height);
            }
            else if (l1 is Layer1D && l2 is Layer1D)
            {
                Layer1D prev = (Layer1D)l1;
                Layer1D curr = (Layer1D)l2;
                weight = new double[1, 1, prev.size, curr.size];
                bias   = new double[curr.size];
                initializeWeights(prev.size);
            }
        }
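
The constructor only shows the call to initializeWeights with the incoming fan-in; the body of that method is not part of the listing. A minimal sketch of what it could look like, assuming a fan-in-scaled uniform initialization (a common choice for tanh networks):

        // Hypothetical sketch; the original implementation is not shown.
        // Draws each weight uniformly from [-1/sqrt(fanIn), 1/sqrt(fanIn)] so
        // that initial pre-activations stay in the near-linear region of tanh.
        private void initializeWeights(int fanIn)
        {
            Random rng = new Random();
            double range = 1.0 / Math.Sqrt(fanIn);

            // weight is a 4-dimensional array in all three cases above
            for (int a = 0; a < weight.GetLength(0); a++)
                for (int b = 0; b < weight.GetLength(1); b++)
                    for (int c = 0; c < weight.GetLength(2); c++)
                        for (int d = 0; d < weight.GetLength(3); d++)
                            weight[a, b, c, d] = (rng.NextDouble() * 2 - 1) * range;
        }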
Example #2
        // Back-propagation for a 1D layer: applies gradient-descent updates to
        // the next layer's parameters and returns null, since the chain of
        // error terms ends at the input layer.
        public override double[,,] backPropagate(double[,,] currActivated, double[,,] nextError, double learningRate)
        {
            if (nextLayer is Layer1D)
            {
                Layer1D next = (Layer1D)nextLayer;

                // gradient step on the weights: dE/dw_ij = a_i * delta_j
                for (int i = 0; i < size; i++)
                {
                    for (int j = 0; j < next.size; j++)
                    {
                        next.matrix.weight[0, 0, i, j] -= currActivated[0, 0, i] * nextError[0, 0, j] * learningRate;
                    }
                }

                // gradient step on the biases: dE/db_j = delta_j
                for (int i = 0; i < next.size; i++)
                {
                    next.matrix.bias[i] -= nextError[0, 0, i] * learningRate;
                }
            }
            else if (nextLayer is Layer2D)
            {
                // a 1D layer never feeds a 2D layer (see the assert in Example #1)
                throw new InvalidOperationException("A 1D layer cannot feed a 2D layer.");
            }

            return null;
        }
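
The nextError argument consumed above has to be produced somewhere. A hypothetical helper (not in the original listing) that yields it for the output layer, assuming a squared-error loss on tanh activations, which matches the 1 - a * a derivative factor used in Example #4:

        // Hypothetical output-layer delta, assuming squared-error loss and
        // tanh activations; not part of the original listing.
        public static double[,,] OutputError(double[,,] activated, double[] target)
        {
            double[,,] delta = new double[1, 1, target.Length];

            for (int i = 0; i < target.Length; i++)
            {
                double a = activated[0, 0, i];
                // dE/dz_i = (a_i - t_i) * tanh'(z_i), with tanh'(z) = 1 - a^2
                delta[0, 0, i] = (a - target[i]) * (1 - a * a);
            }

            return delta;
        }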
Example #3
        // Forward pass for a 1D layer: an affine combination of the previous
        // layer's activations followed by a tanh non-linearity.
        public override double[,,] forwardPropagate(double[,,] prevActivated)
        {
            double[,,] ret = new double[1, 1, size];

            for (int i = 0; i < size; i++)
            {
                ret[0, 0, i] = matrix.bias[i];
            }

            // 1D -> 1D: fully connected
            if (prevLayer is Layer1D)
            {
                Layer1D prev = (Layer1D)prevLayer;

                for (int i = 0; i < prev.size; i++)
                {
                    for (int j = 0; j < size; j++)
                    {
                        ret[0, 0, j] += matrix.weight[0, 0, i, j] * prevActivated[0, 0, i];
                    }
                }

                for (int i = 0; i < size; i++)
                {
                    ret[0, 0, i] = Math.Tanh(ret[0, 0, i]);
                }
            }
            // 2D -> 1D: every unit of the previous volume feeds every neuron
            else if (prevLayer is Layer2D)
            {
                Layer2D prev = (Layer2D)prevLayer;

                for (int i = 0; i < prev.depth; i++)
                {
                    for (int j = 0; j < prev.width; j++)
                    {
                        for (int k = 0; k < prev.height; k++)
                        {
                            for (int l = 0; l < size; l++)
                            {
                                ret[0, 0, l] += matrix.weight[i, j, k, l] * prevActivated[i, j, k];
                            }
                        }
                    }
                }

                for (int i = 0; i < size; i++)
                {
                    ret[0, 0, i] = Math.Tanh(ret[0, 0, i]);
                }
            }

            return ret;
        }
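
The forward pass keeps only the activated values. That is enough for the backward pass, because the derivative of tanh is recoverable from the activation alone; this is what the (1 - a * a) factors in Example #4 rely on:

        // tanh'(z) = 1 - tanh(z)^2, so with a = Math.Tanh(z) already computed,
        // the derivative is simply 1 - a * a; no pre-activation z is stored.
        static double TanhDerivativeFromActivation(double a)
        {
            return 1 - a * a;
        }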
Example #4
        // Back-propagation for a 2D layer: returns this layer's error term and
        // applies gradient-descent updates to the next layer's weights and biases.
        public override double[,,] backPropagate(double[,,] currActivated, double[,,] nextError, double learningRate)
        {
            double[,,] ret = new double[depth, width, height];

            if (nextLayer is Layer1D)
            {
                Layer1D next = (Layer1D)nextLayer;

                // 1) propagate the error back to this layer through the (not yet
                //    updated) weights, applying the tanh derivative 1 - a^2
                for (int d = 0; d < depth; d++)
                {
                    for (int i = 0; i < width; i++)
                    {
                        for (int j = 0; j < height; j++)
                        {
                            for (int k = 0; k < next.size; k++)
                            {
                                ret[d, i, j] += next.matrix.weight[d, i, j, k] * nextError[0, 0, k] * (1 - currActivated[d, i, j] * currActivated[d, i, j]);
                            }
                        }
                    }
                }

                // 2) gradient step on the weights: dE/dw = activation * error
                for (int d = 0; d < depth; d++)
                {
                    for (int i = 0; i < width; i++)
                    {
                        for (int j = 0; j < height; j++)
                        {
                            for (int k = 0; k < next.size; k++)
                            {
                                next.matrix.weight[d, i, j, k] -= nextError[0, 0, k] * currActivated[d, i, j] * learningRate;
                            }
                        }
                    }
                }

                // 3) gradient step on the biases: one bias per neuron of the
                //    next layer, indexed by that neuron
                for (int i = 0; i < next.size; i++)
                {
                    next.matrix.bias[i] -= nextError[0, 0, i] * learningRate;
                }
            }
            else if (nextLayer is ConvolutionalLayer)
            {
                ConvolutionalLayer next = (ConvolutionalLayer)nextLayer;

                // 1) accumulate this layer's error over every output position the
                //    kernel touches, again scaling by the tanh derivative
                for (int d = 0; d < depth; d++)
                {
                    for (int i = 0; i < next.kernelWidth; i++)
                    {
                        for (int j = 0; j < next.kernelHeight; j++)
                        {
                            for (int nd = 0; nd < next.depth; nd++)
                            {
                                for (int m = 0; m < next.width; m++)
                                {
                                    for (int n = 0; n < next.height; n++)
                                    {
                                        ret[d, i + m, j + n] += next.matrix.weight[d, nd, i, j] * nextError[nd, m, n] * (1 - currActivated[d, i + m, j + n] * currActivated[d, i + m, j + n]);
                                    }
                                }
                            }
                        }
                    }
                }

                // 2) gradient step on the kernel weights, summed over every output
                //    position that used each weight
                for (int d = 0; d < depth; d++)
                {
                    for (int i = 0; i < next.kernelWidth; i++)
                    {
                        for (int j = 0; j < next.kernelHeight; j++)
                        {
                            for (int nd = 0; nd < next.depth; nd++)
                            {
                                for (int m = 0; m < next.width; m++)
                                {
                                    for (int n = 0; n < next.height; n++)
                                    {
                                        next.matrix.weight[d, nd, i, j] -= nextError[nd, m, n] * currActivated[d, i + m, j + n] * learningRate;
                                    }
                                }
                            }
                        }
                    }
                }

                // 3) gradient step on the biases: one bias per output depth
                //    slice of the next layer
                for (int nd = 0; nd < next.depth; nd++)
                {
                    for (int i = 0; i < next.width; i++)
                    {
                        for (int j = 0; j < next.height; j++)
                        {
                            next.matrix.bias[nd] -= nextError[nd, i, j] * learningRate;
                        }
                    }
                }
            }

            return ret;
        }
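
None of the examples show the driver that chains these calls. A hypothetical training step, assuming the layers are stored front to back in a Layer[] and reusing the OutputError helper sketched after Example #2; this scaffolding is an assumption, not part of the original code:

        public static void TrainStep(Layer[] layers, double[,,] input, double[] target, double learningRate)
        {
            int n = layers.Length;
            double[][,,] activated = new double[n][,,];

            // forward pass: the first layer's activation is the raw input, and
            // each later layer consumes the previous layer's activations
            activated[0] = input;
            for (int i = 1; i < n; i++)
            {
                activated[i] = layers[i].forwardPropagate(activated[i - 1]);
            }

            // the output layer's error term comes from the loss function
            double[,,] error = OutputError(activated[n - 1], target);

            // backward pass: each layer updates the next layer's parameters and
            // returns its own error term; a null return (Example #2) ends the walk
            for (int i = n - 2; i >= 0 && error != null; i--)
            {
                error = layers[i].backPropagate(activated[i], error, learningRate);
            }
        }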