Exemplo n.º 1
0
        //TODO figure out batching for a CNN
        /// <summary>
        /// Builds a convolutional layer: a 2-D input segment buffer, one feature map
        /// per filter, and a matching max-pool stage for each feature map.
        /// </summary>
        /// <param name="props">Layer geometry (input rows/cols, feature count, convolutional output size).</param>
        /// <param name="activationFunction">Activation forwarded to the base layer.</param>
        /// <param name="batches">Mini-batch count forwarded to the base layer; defaults to 1.</param>
        /// <param name="outputLayer">True when this is the network's final layer.</param>
        public ConvolutionalLayer(ConvolutionalLayerProps props, ActivationFunctions activationFunction, uint batches = 1, bool outputLayer = false)
            : base((uint)props.ConvolutionalOutputs, 0, activationFunction, batches, outputLayer)
        {
            Props        = props;

            // Jagged InputRows x InputCols buffer that receives the incoming sample.
            InputSegment = new float[props.InputRows][];
            for (int i = 0; i < InputSegment.Length; i++)
            {
                InputSegment[i] = new float[props.InputCols];
            }

            FeatureMapProps featureProps = new FeatureMapProps(props);

            FeatureMaps = new FeatureMap[props.NumberFeatures];
            for (int f = 0; f < FeatureMaps.Length; f++)
            {
                FeatureMaps[f] = new FeatureMap(featureProps);
            }

            MaxPoolProps poolProps = new MaxPoolProps(props);

            MaxPools = new MaxPool[props.NumberFeatures];
            // FIX: the original loop was bounded by FeatureMaps.Length while filling
            // MaxPools. Both arrays are NumberFeatures long today, but the bound must
            // track the array being written so the code stays correct if they diverge.
            for (int p = 0; p < MaxPools.Length; p++)
            {
                MaxPools[p] = new MaxPool(poolProps);
            }
        }
        /// <summary>
        /// Forward pass with sigmoid activation at every stage: loads the sample into
        /// the input layer, then propagates weighted sums through each hidden layer
        /// and finally into the output layer.
        /// </summary>
        /// <param name="sample">Sample whose data length must equal <c>inputSize</c>.</param>
        private void SigmoidActivation(DataSample sample)
        {
            // Size mismatch is reported and the pass is skipped, not thrown.
            if (sample.data.Length != inputSize)
            {
                Console.WriteLine("NOT the same size - " + sample.data.Length + " / " + inputSize);
                return;
            }

            inputLayerValue = sample.data;

            // Input layer -> first hidden layer.
            for (int node = 0; node < hiddenSize; node++)
            {
                float weightedSum = 0;

                for (int src = 0; src < inputSize; src++)
                {
                    weightedSum += (float)(inputLayerValue[src] * inputLayerWeights[src, node]);
                }

                hiddenLayerValue[0, node] = ActivationFunctions.Sigmoid(weightedSum);
            }

            // Hidden layer (depth - 1) -> hidden layer (depth).
            for (int depth = 1; depth < hiddenDimension; depth++)
            {
                for (int node = 0; node < hiddenSize; node++)
                {
                    float weightedSum = 0;

                    for (int src = 0; src < hiddenSize; src++)
                    {
                        weightedSum += hiddenLayerValue[depth - 1, src] * hiddenLayerWeights[depth - 1, node, src];
                    }

                    hiddenLayerValue[depth, node] = ActivationFunctions.Sigmoid(weightedSum);
                }
            }

            // Last hidden layer -> output layer.
            int lastHidden = hiddenDimension - 1;
            for (int outNode = 0; outNode < outputSize; outNode++)
            {
                float weightedSum = 0;

                for (int src = 0; src < hiddenSize; src++)
                {
                    weightedSum += hiddenLayerValue[lastHidden, src] * outputLayerWeights[outNode, src];
                }

                outputLayerValue[outNode] = ActivationFunctions.Sigmoid(weightedSum);
            }
        }
Exemplo n.º 3
0
        /// <summary>
        /// Creates a fully connected layer of <paramref name="numPerceptrons"/>
        /// perceptrons, each wired to <paramref name="numInputs"/> inputs.
        /// </summary>
        /// <param name="numPerceptrons">Number of perceptrons in this layer.</param>
        /// <param name="numInputs">Inputs feeding each perceptron (0 for an input layer).</param>
        /// <param name="activationFunction">Activation function shared by the layer.</param>
        /// <param name="batches">Mini-batch count; a value of 0 is clamped to 1.</param>
        /// <param name="outputLayer">True when this is the network's final layer.</param>
        public Layer(uint numPerceptrons, uint numInputs, ActivationFunctions activationFunction, uint batches = 1, bool outputLayer = false)
        {
            ActivationFunction = activationFunction;
            Perceptrons        = new Perceptron[numPerceptrons];
            InputCount         = numInputs;
            OutputLayer        = outputLayer;

            // A batch count of zero is meaningless; clamp to a single batch.
            if (batches == 0)
            {
                batches = 1;
            }
            TotalBatches = batches;

            // FIX: the original passed the Batches property, which is never assigned
            // in this constructor, so the clamped batch count above was ignored.
            // TotalBatches carries the validated value just stored.
            for (int i = 0; i < Perceptrons.Length; i++)
            {
                Perceptrons[i] = new Perceptron(InputCount, TotalBatches, OutputLayer);
            }
        }
Exemplo n.º 4
0
        // Creates the weight matrices and resolves the activation/derivative
        // functions for the hidden and output layers.
        // (Original Russian comment: "creation of weight matrices and input and
        // output signals".)
        //
        // Throws Exception when the topology is invalid: non-positive input count,
        // null/empty hidden-layer list, a non-positive hidden-layer size, or a
        // non-positive output count.
        void Create()
        {
            if (structure.inputs < 1)
            {
                throw new Exception("Create NeuralNetwork: inputs must be greater than zero");
            }

            // FIX: the message promises a null check, but the original only tested
            // Length and would throw NullReferenceException on a null hiddens array.
            if (structure.hiddens == null || structure.hiddens.Length == 0)
            {
                throw new Exception("Create NeuralNetwork: hiddens is null or zero");
            }

            for (int i = 0; i < structure.hiddens.Length; i++)
            {
                if (structure.hiddens[i] < 1)
                {
                    throw new Exception("Create NeuralNetwork: hiddens at " + i + " layer must be greater than zero");
                }
            }

            if (structure.outputs < 1)
            {
                throw new Exception("Create NeuralNetwork: outputs must be greater than zero");
            }

            // One matrix per connection: inputs -> hidden[0], hidden[i] -> hidden[i+1],
            // hidden[last] -> outputs. Each matrix is (destination x source).
            weights = new Matrix[1 + structure.hiddens.Length];

            weights[0] = new Matrix(structure.hiddens[0], structure.inputs);

            for (int i = 0; i < structure.hiddens.Length - 1; i++)
            {
                weights[i + 1] = new Matrix(structure.hiddens[i + 1], structure.hiddens[i]);
            }

            weights[weights.Length - 1] = new Matrix(structure.outputs, structure.hiddens[structure.hiddens.Length - 1]);

            // Resolve activation functions and their derivatives once, up front.
            hiddensActivation = ActivationFunctions.GetFunction(structure.hiddensFunction);
            hiddensDerivative = ActivationFunctions.GetDerivative(structure.hiddensFunction);

            outputActivation = ActivationFunctions.GetFunction(structure.outputFunction);
            outputDerivative = ActivationFunctions.GetDerivative(structure.outputFunction);
        }
Exemplo n.º 5
0
        /// <summary>
        /// Builds a three-layer network (input, hidden, output), shares one
        /// activation function across the layers, and resets weights and batch state.
        /// </summary>
        /// <param name="numInputs">Neurons in the input layer.</param>
        /// <param name="numHiddenNodes">Neurons in the single hidden layer.</param>
        /// <param name="numOutputs">Neurons in the output layer.</param>
        /// <param name="batches">Mini-batch count; a value of 0 is clamped to 1.</param>
        public NeuralNetwork(uint numInputs, uint numHiddenNodes, uint numOutputs, uint batches = 1)
        {
            NumInputs      = numInputs;
            NumHiddenNodes = numHiddenNodes;
            NumOutputs     = numOutputs;

            m_ActivationFunction = new ActivationFunctions();
            Layers = new Layer[3];

            // For an unsigned count, "< 1" and "== 0" are the same test.
            if (batches == 0)
            {
                batches = 1;
            }
            TotalBatches = batches;
            ResetBatch();

            // Wire the named layer references to their slots in the array.
            InputLayer  = new Layer(numInputs, 0, m_ActivationFunction, batches);
            Layers[0]   = InputLayer;
            HiddenLayer = new Layer(numHiddenNodes, numInputs, m_ActivationFunction, batches);
            Layers[1]   = HiddenLayer;
            OutputLayer = new Layer(numOutputs, numHiddenNodes, m_ActivationFunction, batches, true);
            Layers[2]   = OutputLayer;

            ConfusionMatrix = new ConfusionMatrix(numOutputs);

            Reset();
        }