Example #1
        // End-to-end smoke test: convolve a 3x3 input with a single 2x2 filter,
        // feed the 2x2 result through a 1-neuron fully connected layer, then run
        // one backpropagation step toward a fixed target value.
        internal static void ConvolutionTest()
        {
            InputLayer iL = new InputLayer(3, 3, 1); // 3x3 input, single channel

            // One 2x2 filter, stride 1, no zero padding -> a 2x2x1 feature map
            ConvolutionalLayer cL = new ConvolutionalLayer(1, 2, 1, 0, iL, ActivationFunctions.None());
            //Debug.WriteLine("Filter ---------------");
            int i = 0;

            // Fill the 2x2 filter with the weights 0.1, 0.2, 0.3, 0.4 (the loop visits the filter row by row)
            for (int y = 0; y < 2; y++)
            {
                StringBuilder sb = new StringBuilder();
                for (int x = 0; x < 2; x++, i++)
                {
                    if (x != 0)
                    {
                        sb.Append(", ");
                    }

                    float w = 0.1f * (i + 1);

                    cL.SetWeight(i, w);
                    sb.Append(w);
                }
                //Debug.WriteLine(sb.ToString());
            }

            // Single output neuron; all of its weights are set to 1 below
            FullyConnectedLayer fcL = new FullyConnectedLayer(1, cL, ActivationFunctions.None());
            //Debug.WriteLine("Weights ---------------");
            StringBuilder SB = new StringBuilder();

            for (int j = 0; j < fcL.Weights; j++)
            {
                if (j != 0)
                {
                    SB.Append(", ");
                }

                float w = 1f;

                fcL.SetWeight(j, w);
                SB.Append(w);
            }
            //Debug.WriteLine(SB.ToString());


            NeuralNetwork nn = new NeuralNetwork(iL, cL, fcL);


            // Fill the input with the values 1..9, row by row
            float[,,] inputValues = new float[3, 3, 1];
            i = 0;
            for (int z = 0; z < 1; z++)
            {
                //Debug.WriteLine($"{z} --------------------");
                for (int y = 0; y < 3; y++)
                {
                    StringBuilder sb = new StringBuilder();
                    for (int x = 0; x < 3; x++, i++)
                    {
                        //if (x != 0)
                        //    sb.Append(",\t");

                        inputValues[x, y, z] = i + 1;
                        //sb.Append(inputValues[x, y, z]);
                    }
                    //Debug.WriteLine(sb.ToString());
                }
            }
            NNFeedData inputData = new NNFeedData(inputValues);

            Debug.WriteLine(nn.FeedForward(inputData)[0]); // network output before training

            // One preloaded training sample with target output -3.2
            NNFeedData     targetData   = new NNFeedData(1, 1, 1, -3.2f);
            NNTrainingData trainingData = new NNPreloadedTrainingData(new[] { inputData }, new[] { targetData });

            // Learning rate 0.2; the error derivative with respect to the output is (o - t)
            NNBackpropagationData backpropagationData = new NNBackpropagationData(trainingData, 0.2f, (o, t) => o - t);

            //for (int j = 0; j < 1000; j++) {
            nn.PropagateBackward(backpropagationData); // a single backpropagation step
            //}

            Debug.WriteLine(nn.FeedForward(inputData)[0]); // network output after training; it should have moved toward -3.2

            //NNDetailedFeedData outputData = iL.FeedForward(inputData);
            //outputData = cL.FeedForward(outputData.OutputData);

            //Debug.WriteLine("Convolution Out");
            //for (int z = 0; z < cL.Depth; z++) {
            //    Debug.WriteLine($"{z} --------------------");
            //    for (int y = 0; y < cL.Height; y++) {
            //        StringBuilder sb = new StringBuilder();
            //        for (int x = 0; x < cL.Width; x++) {
            //            if (x != 0)
            //                sb.Append(", ");

            //            sb.Append($"{outputData.OutputData[outputData.OutputData.ToNeuronIndex(x, y, z)]}");
            //        }
            //        Debug.WriteLine(sb.ToString());
            //    }
            //}

            //outputData = fcL.FeedForward(outputData.OutputData);
            //Debug.WriteLine("Fully Connected Out");
            //Debug.WriteLine(outputData.OutputData[0]);
        }
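
For orientation, the forward pass in this test can be checked by hand. If the filter weights are applied in the same row-major order in which they are set, the layer computes a plain cross-correlation, and all biases default to zero (none of which is guaranteed by the listing alone), the 2x2 feature map works out to 3.7, 4.7, 6.7 and 7.7, and the fully connected layer with unit weights sums these to 22.8, which is what the first Debug.WriteLine call would then print. A minimal standalone sketch of that arithmetic, under exactly those assumptions (the method name ConvolutionHandCheck exists only for this sketch):

        // Hand-check of ConvolutionTest's forward pass (sketch only).
        // Assumptions: zero biases, row-major filter weight layout, cross-correlation semantics.
        internal static void ConvolutionHandCheck()
        {
            float[,] input  = { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } };   // input[y, x], values 1..9
            float[,] filter = { { 0.1f, 0.2f }, { 0.3f, 0.4f } };          // filter[y, x], weights 0.1..0.4

            float sum = 0f;                        // output of the 1-neuron FC layer (all weights 1)
            for (int oy = 0; oy < 2; oy++)         // 2x2 output: 3x3 input, 2x2 filter, stride 1, no padding
            {
                for (int ox = 0; ox < 2; ox++)
                {
                    float acc = 0f;
                    for (int fy = 0; fy < 2; fy++)
                    {
                        for (int fx = 0; fx < 2; fx++)
                        {
                            acc += input[oy + fy, ox + fx] * filter[fy, fx];
                        }
                    }
                    // acc takes the values 3.7, 4.7, 6.7, 7.7
                    sum += acc;
                }
            }

            Debug.WriteLine(sum);                  // 22.8 under the assumptions above
        }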
Example #2
        // Rebuilds a single layer from its XML element and attaches it to previousLayer.
        internal static Layer FromXML(XMLElement root, Layer previousLayer) {

            // Parses a comma-separated list of float values, e.g. "0.1,0.2,0.3"
            float[] StringToArray(string s) {
                string[] valStrs = s.Split(',');
                float[] array = new float[valStrs.Length];
                for (int i = 0; i < valStrs.Length; i++) {
                    array[i] = float.Parse(valStrs[i]);
                }
                return array;
            }

            // The element's tag holds the layer type name; dispatch on it
            string type = root.Tag;
            if (type == typeof(InputLayer).Name) {
                int width = int.Parse(root.GetAttribute("Width").Value);
                int height = int.Parse(root.GetAttribute("Height").Value);
                int depth = int.Parse(root.GetAttribute("Depth").Value);

                return new InputLayer(width, height, depth);
            }

            if (type == typeof(MaxPoolingLayer).Name) {
                int filterSize = int.Parse(root.GetAttribute("FilterSize").Value);
                int stride = int.Parse(root.GetAttribute("Stride").Value);

                return new MaxPoolingLayer(filterSize, stride, previousLayer);
            }

            if (type == typeof(FullyConnectedLayer).Name) {
                int neurons = int.Parse(root.GetAttribute("Neurons").Value);
                ActivationFunctions.ActivationFunction activation = ActivationFunctions.FromXML(root.ChildWithTag("ActivationFunction"));

                FullyConnectedLayer layer = new FullyConnectedLayer(neurons, previousLayer, activation);

                float[] weights = StringToArray(root.ChildWithTag("Weights").Value);
                for (int i = 0; i < layer.Weights; i++) {
                    layer.SetWeight(i, weights[i]);
                }

                float[] biases = StringToArray(root.ChildWithTag("Biases").Value);
                for (int i = 0; i < layer.Biases; i++) {
                    layer.SetBias(i, biases[i]);
                }

                return layer;
            }

            if (type == typeof(ConvolutionalLayer).Name) {
                int filterCount = int.Parse(root.GetAttribute("FilterCount").Value);
                int filterSize = int.Parse(root.GetAttribute("FilterSize").Value);
                int stride = int.Parse(root.GetAttribute("Stride").Value);
                int zeroPadding = int.Parse(root.GetAttribute("ZeroPadding").Value);
                ActivationFunctions.ActivationFunction activation = ActivationFunctions.FromXML(root.ChildWithTag("ActivationFunction"));

                ConvolutionalLayer layer = new ConvolutionalLayer(filterCount, filterSize, stride, zeroPadding, previousLayer, activation);

                float[] weights = StringToArray(root.ChildWithTag("Weights").Value);
                for (int i = 0; i < layer.Weights; i++) {
                    layer.SetWeight(i, weights[i]);
                }

                float[] biases = StringToArray(root.ChildWithTag("Biases").Value);
                for (int i = 0; i < layer.Biases; i++) {
                    layer.SetBias(i, biases[i]);
                }

                return layer;
            }

            throw new NotImplementedException($"Unknown layer type: {type}");    // Reached only if no known layer type matched
        }
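
FromXML rebuilds one layer and wires it to the layer passed in as previousLayer, so restoring a whole network amounts to walking the serialized layer elements in order and threading each result into the next call. Below is a minimal sketch of that loop; the networkRoot element and its Children accessor are assumptions, since the XMLElement API beyond Tag, Value, GetAttribute and ChildWithTag is not shown here, and NetworkFromXML is only an illustrative name.

        // Sketch only: chains FromXML over the child elements of a (hypothetical) network element.
        // Requires System.Collections.Generic; "Children" is an assumed accessor on XMLElement.
        internal static List<Layer> NetworkFromXML(XMLElement networkRoot)
        {
            List<Layer> layers = new List<Layer>();
            Layer previousLayer = null;            // the first element is expected to be the InputLayer

            foreach (XMLElement child in networkRoot.Children)
            {
                Layer layer = FromXML(child, previousLayer);
                layers.Add(layer);
                previousLayer = layer;             // the next layer attaches to this one
            }

            return layers;
        }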