Code example #1
 public void Summary()
 {
     Console.Write(NeuralNetworkAPI.PadRight("层", 30));
     Console.Write(NeuralNetworkAPI.PadRight("输入"));
     Console.Write(NeuralNetworkAPI.PadRight("神经元"));
     Console.Write(NeuralNetworkAPI.PadRight("激活函数"));
     Console.Write(NeuralNetworkAPI.PadRight("Padding"));
     Console.Write(NeuralNetworkAPI.PadRight("输出"));
     if (!IsSequential)
     {
         Console.Write(NeuralNetworkAPI.PadRight("连接"));
     }
     Console.WriteLine();
     foreach (var layer in Layers)
     {
         Console.Write(NeuralNetworkAPI.PadRight(layer.Name, 30));
         Console.Write(NeuralNetworkAPI.PadRight(NeuralNetworkAPI.Shape(layer.InputShape)));
         layer.Summary(out object neuronColumn, out object activationFunctionColumn, out object paddingColumn);
         Console.Write(NeuralNetworkAPI.PadRight(neuronColumn));
         Console.Write(NeuralNetworkAPI.PadRight(activationFunctionColumn));
         Console.Write(NeuralNetworkAPI.PadRight(paddingColumn));
         Console.Write(NeuralNetworkAPI.PadRight(NeuralNetworkAPI.Shape(layer.OutputShape)));
         if (!IsSequential)
         {
             Console.Write(NeuralNetworkAPI.PadRight(NeuralNetworkAPI.Previous(layer.Previous)));
         }
         Console.WriteLine();
     }
 }
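
A minimal call-site sketch for the summary table above. The NeuralNetworkModel container and the H5 loader name are assumptions for illustration; only Summary() itself comes from the listing.

 // Hypothetical call site: print one padded row per layer after the model has been loaded.
 var model = new NeuralNetworkModel();                // assumed class exposing Layers, IsSequential, Load and Summary
 model.Load(new NeuralNetworkH5Loader("model.h5"));   // assumed INeuralNetworkLoader implementation (see example #12)
 model.Summary();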
Code example #2
        public override object ForwardPropagation(object inputs)
        {
            float[,,] inputValues = (float[,,])inputs;
            float[,,] outputs     = new float[
                NeuralNetworkAPI.CalculateOutputLength(inputValues.GetLength(0), Height, StrideY, PaddingType),
                NeuralNetworkAPI.CalculateOutputLength(inputValues.GetLength(1), Width, StrideX, PaddingType),
                Kernels.Length
                                    ];
#if NET40_OR_GREATER
            Parallel.For(0, Kernels.Length, i => {
                ForwardPropagation(i);
            });
#else
            for (int i = 0; i < Kernels.Length; i++)
            {
                ForwardPropagation(i);
            }
#endif
            return(Outputs = outputs);

            void ForwardPropagation(int i)
            {
                Kernels[i].ForwardPropagation(inputValues, ref outputs, i, ActivationFunctionType);
            }
        }
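
The output size above comes from NeuralNetworkAPI.CalculateOutputLength, which is not part of this listing. A sketch of a compatible helper, assuming Keras-style "valid"/"same" semantics (an assumption, not the library's actual code):

        // Hypothetical helper: output length of one convolution/pooling axis.
        //   Valid: ceil((inputLength - kernelLength + 1) / stride)
        //   Same : ceil(inputLength / stride)
        static int CalculateOutputLength(int inputLength, int kernelLength, int stride, NeuralNetworkPaddingType paddingType)
        {
            switch (paddingType)
            {
            case NeuralNetworkPaddingType.Valid:
                return (inputLength - kernelLength + stride) / stride;
            case NeuralNetworkPaddingType.Same:
                return (inputLength + stride - 1) / stride;
            default:
                throw new NotImplementedException(paddingType.ToString());
            }
        }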
Code example #3
 public int PredictClasses(object inputs)
 {
     if (inputs is null)
     {
         throw new ArgumentNullException(nameof(inputs));
     }
     return(NeuralNetworkAPI.IndexOfMax((float[])ForwardPropagation(inputs)));
 }
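
A possible call site for PredictClasses. The input helper and its shape are illustrative assumptions; the method itself is the one shown above.

 // Hypothetical classification call: the flattened input must match the model's input shape.
 float[] pixels    = LoadNormalizedImage("digit.png");   // assumed helper returning e.g. 784 floats in [0, 1]
 int     predicted = model.PredictClasses(pixels);       // index of the largest output activation
 Console.WriteLine($"Predicted class: {predicted}");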
Code example #4
 public float[,,,] GetConv2DWeights(string name, int width, int height, int channel, int units)
 {
     if (name is null)
     {
         throw new ArgumentNullException(nameof(name));
     }
     NeuralNetworkAPI.GetDataSet(H5GroupIdModelWeights, $"{name}/{name}/kernel:0", out float[,,,] weights, width, height, channel, units);
     return(weights);
 }
Code example #5
 public float[,] GetDenseWeights(string name, int units, int inputShape)
 {
     if (name is null)
     {
         throw new ArgumentNullException(nameof(name));
     }
     NeuralNetworkAPI.GetDataSet(H5GroupIdModelWeights, $"{name}/{name}/kernel:0", out float[,] weights, units, inputShape);
     return(weights);
 }
Code example #6
 public NeuralNetworkBoundBox(float score, float minX, float minY, float maxX, float maxY, float[] classes)
 {
     Score   = score;
     MinX    = minX;
     MinY    = minY;
     MaxX    = maxX;
     MaxY    = maxY;
     Classes = classes;
     Label   = NeuralNetworkAPI.IndexOfMax(classes);
 }
Code example #7
        }        // TODO: to be implemented

        void PrintBoxes(string title, List <NeuralNetworkBoundBox> boundBoxes)
        {
            // Debug output is disabled by this early return; remove it to print the boxes.
            return;

            Console.WriteLine();
            Console.WriteLine(title);
            foreach (var boundBox in boundBoxes)
            {
                int label = NeuralNetworkAPI.IndexOfMax(boundBox.Classes);
                Console.WriteLine($"Label:{label} Score:{boundBox.Classes[label]} MinX:{boundBox.MinX} MinY:{boundBox.MinY} MaxX:{boundBox.MaxX} MaxY:{boundBox.MaxY}");
                Console.WriteLine($"Classes:{string.Join(", ", Array.ConvertAll(boundBox.Classes, classes => classes.ToString()))}");
            }
        }
Code example #8
        public float[] GetBiases(string name, int units)
        {
            if (name is null)
            {
                throw new ArgumentNullException(nameof(name));
            }
            ObjectInfo objectInfo = null;

            try {
                objectInfo = H5G.getObjectInfo(H5GroupIdModelWeights, $"{name}/{name}/bias:0", true);
            } catch {
                // The bias dataset may be missing for this layer; fall through and return zeros below.
            }
            if (objectInfo == null)
            {
                return(new float[units]);
            }
            NeuralNetworkAPI.GetDataSet(H5GroupIdModelWeights, $"{name}/{name}/bias:0", out float[] biases, units);
            return(biases);
        }
Code example #9
        public override object ForwardPropagation(object inputs)
        {
            float[] inputValues = (float[])inputs;
            float[] outputs     = new float[Neurons.Length];
#if NETFRAMEWORK
            Parallel.For(0, Neurons.Length, i => {
                ForwardPropagation(i);
            });
#else
            for (int i = 0; i < Neurons.Length; i++)
            {
                ForwardPropagation(i);
            }
#endif
            switch (ActivationFunctionType)
            {
            // ReLU, Sigmoid and Linear are applied element-wise inside Neurons[i].ForwardPropagation,
            // which receives ActivationFunctionType; only Softmax needs the whole output vector.
            case NeuralNetworkActivationFunctionType.ReLU:
                break;

            case NeuralNetworkActivationFunctionType.Softmax: {
                NeuralNetworkAPI.Softmax(outputs);
                break;
            }

            case NeuralNetworkActivationFunctionType.Sigmoid:
                break;

            case NeuralNetworkActivationFunctionType.Linear:
                break;

            default:
                throw new NotImplementedException(ActivationFunctionType.ToString());
            }
            return(Outputs = outputs);

            void ForwardPropagation(int i)
            {
                outputs[i] = Neurons[i].ForwardPropagation(inputValues, ActivationFunctionType);
            }
        }
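
NeuralNetworkAPI.Softmax is applied in place on the whole output vector but is not shown in this listing. A numerically stable sketch of what such a routine typically does (an assumption, not the library's actual implementation):

        // Hypothetical in-place softmax: subtract the maximum before exponentiating to avoid overflow.
        static void Softmax(float[] values)
        {
            float max = values[0];
            for (int i = 1; i < values.Length; i++)
            {
                if (values[i] > max)
                {
                    max = values[i];
                }
            }
            float sum = 0;
            for (int i = 0; i < values.Length; i++)
            {
                values[i] = (float)Math.Exp(values[i] - max);
                sum      += values[i];
            }
            for (int i = 0; i < values.Length; i++)
            {
                values[i] /= sum;
            }
        }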
Code example #10
 public float[] GetValues(string name, string key, int length)
 {
     NeuralNetworkAPI.GetDataSet(H5GroupIdModelWeights, $"{name}/{name}/{key}:0", out float[] values, length);
     return(values);
 }
Code example #11
 public JsonObject GetModelConfig()
 {
     return(JsonObject.Parse(NeuralNetworkAPI.GetAttributeValue(H5GroupId, NeuralNetworkKeyword.ModelConfig)));
 }
Code example #12
        public void Load(INeuralNetworkLoader loader, JsonTextWriter textWriter = null)
        {
            if (loader is null)
            {
                throw new ArgumentNullException(nameof(loader));
            }
            JsonObject modelConfig = loader.GetModelConfig();

            if (textWriter != null)
            {
                textWriter.BeginObject();
                textWriter.Write(NeuralNetworkKeyword.ModelConfig);
                new JsonTextBuilder(new JsonValueReader(modelConfig), textWriter).BuildObject();
            }
            IsSequential = modelConfig["class_name"] == "Sequential";
            NeuralNetworkLayer previousLayer = null;

            Layers.Clear();
            foreach (JsonObject jsonLayer in modelConfig["config"]["layers"].Array)
            {
                string             className  = jsonLayer["class_name"];
                JsonObject         config     = jsonLayer["config"];
                string             name       = config["name"];
                JsonValue          activation = config["activation"];
                JsonValue          dataFormat = config["data_format"];
                JsonValue          padding    = config["padding"];
                NeuralNetworkLayer layer;
                JsonArray          inboundNodes = jsonLayer["inbound_nodes"][0];
                foreach (JsonValue inboundNode in inboundNodes)
                {
                    string targetName = inboundNode[0];
                    foreach (NeuralNetworkLayer currentLayer in Layers)
                    {
                        if (currentLayer.Name == targetName)
                        {
                            previousLayer = currentLayer;
                            break;
                        }
                    }
                }
                switch (className)
                {
                case "InputLayer": {
                    JsonArray batchInputShape = config["batch_input_shape"];
                    batchInputShape.RemoveAt(0);
                    List <int> inputShapes = JsonConvert.Deserialize <List <int> > (batchInputShape);
                    AddLayer(new NeuralNetworkInputLayer());
                    layer.InputShape  = inputShapes.ToArray();
                    layer.OutputShape = layer.InputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.Dense): {
                    int units = config["units"];
                    float[,] weights = loader.GetDenseWeights(name, units, previousLayer.OutputShape[0]);
                    float[] biases = loader.GetBiases(name, units);
                    if (textWriter != null)
                    {
                        WriteLayer(textWriter, name, weights, biases);
                    }
                    NeuralNetworkNeuron[] neurons = new NeuralNetworkNeuron[units];
                    for (int i = 0; i < neurons.Length; i++)
                    {
                        neurons[i] = new NeuralNetworkNeuron(previousLayer.OutputShape[0], biases[i]);
                        for (int n = 0; n < previousLayer.OutputShape[0]; n++)
                        {
                            neurons[i].Weights[n] = weights[n, i];
                        }
                    }
                    int[] outputShape = new int[] { units };
                    AddLayer(new NeuralNetworkDenseLayer(neurons, NeuralNetworkAPI.GetActivationFunctionType(activation)));
                    layer.OutputShape = outputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.Conv2D): {
                    JsonArray kernelSize = config["kernel_size"];
                    JsonArray strides    = config["strides"];
                    int       units      = config["filters"];
                    int       width      = kernelSize[1];
                    int       height     = kernelSize[0];
                    int       channel    = previousLayer.OutputShape[2];
                    int       strideX    = strides[1];
                    int       strideY    = strides[0];
                    float[,,,] weights = loader.GetConv2DWeights(name, width, height, channel, units);
                    float[] biases = loader.GetBiases(name, units);
                    if (textWriter != null)
                    {
                        WriteLayer(textWriter, name, weights, biases);
                    }
                    NeuralNetworkPaddingType paddingType = NeuralNetworkAPI.GetPaddingType(padding);
                    NeuralNetworkKernel[]    kernels     = new NeuralNetworkKernel[units];
                    for (int i = 0; i < kernels.Length; i++)
                    {
                        kernels[i] = new NeuralNetworkKernel(width, height, channel, strideX, strideY, biases[i], paddingType);
                        for (int c = 0; c < channel; c++)
                        {
                            for (int y = 0; y < height; y++)
                            {
                                for (int x = 0; x < width; x++)
                                {
                                    kernels[i].Weights[y, x, c] = weights[y, x, c, i];
                                }
                            }
                        }
                    }
                    int[] outputShape = new int[] {
                        NeuralNetworkAPI.CalculateOutputLength(previousLayer.OutputShape[0], height, strideY, paddingType),
                        NeuralNetworkAPI.CalculateOutputLength(previousLayer.OutputShape[1], width, strideX, paddingType),
                        units
                    };
                    AddLayer(new NeuralNetworkConv2DLayer(
                                 kernels,
                                 width,
                                 height,
                                 strideX,
                                 strideY,
                                 NeuralNetworkAPI.GetActivationFunctionType(activation), paddingType)
                             );
                    layer.OutputShape = outputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.MaxPooling2D): {
                    JsonArray poolSize = config["pool_size"];
                    JsonArray strides  = config["strides"];
                    int       width    = poolSize[1];
                    int       height   = poolSize[0];
                    int       strideX  = strides[1];
                    int       strideY  = strides[0];
                    NeuralNetworkPaddingType paddingType = NeuralNetworkAPI.GetPaddingType(padding);
                    int[] outputShape = new int[] {
                        NeuralNetworkAPI.CalculateOutputLength(previousLayer.OutputShape[0], height, strideY, paddingType),
                        NeuralNetworkAPI.CalculateOutputLength(previousLayer.OutputShape[1], width, strideX, paddingType),
                        previousLayer.OutputShape[2]
                    };
                    AddLayer(new NeuralNetworkMaxPooling2DLayer(width, height, strideX, strideY, paddingType));
                    layer.OutputShape = outputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.Dropout): {
                    // Dropout is an identity at inference time: the layer is not added to Layers,
                    // so previousLayer keeps pointing at the layer before it.
                    layer             = new NeuralNetworkDropoutLayer();
                    layer.OutputShape = previousLayer.OutputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.Flatten): {
                    NeuralNetworkDataFormatType dataFormatType = NeuralNetworkAPI.GetDataFormatType(dataFormat);
                    int sum = 1;
                    NeuralNetworkLayer lastLayer = Layers[Layers.Count - 1];
                    foreach (int length in lastLayer.OutputShape)
                    {
                        sum *= length;
                    }
                    int[] outputShape = new int[] { sum };
                    AddLayer(new NeuralNetworkFlattenLayer(dataFormatType));
                    layer.OutputShape = outputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.Add): {
                    AddLayer(new NeuralNetworkAddLayer());
                    layer.OutputShape = previousLayer.OutputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.BatchNormalization): {
                    AddLayer(new NeuralNetworkBatchNormalizationLayer(
                                 loader.GetValues(name, "beta", previousLayer.OutputShape[2]),
                                 loader.GetValues(name, "gamma", previousLayer.OutputShape[2]),
                                 loader.GetValues(name, "moving_mean", previousLayer.OutputShape[2]),
                                 loader.GetValues(name, "moving_variance", previousLayer.OutputShape[2]),
                                 config["epsilon"]
                                 ));
                    layer.OutputShape = previousLayer.OutputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.LeakyReLU): {
                    AddLayer(new NeuralNetworkLeakyReLULayer(config["alpha"]));
                    layer.OutputShape = previousLayer.OutputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.ZeroPadding2D): {
                    int paddingX = config["padding"][1][0];
                    int paddingY = config["padding"][0][0];
                    AddLayer(new NeuralNetworkZeroPadding2DLayer(paddingX, paddingY));
                    int[] outputShape = new int[] {
                        previousLayer.OutputShape[0] + paddingY * 2,
                        previousLayer.OutputShape[1] + paddingX * 2,
                        previousLayer.OutputShape[2]
                    };
                    layer.OutputShape = outputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.UpSampling2D): {
                    int sizeX = config["size"][1];
                    int sizeY = config["size"][0];
                    AddLayer(new NeuralNetworkUpSampling2DLayer(sizeX, sizeY));
                    int[] outputShape = new int[] { previousLayer.OutputShape[0] * sizeY, previousLayer.OutputShape[1] * sizeX, previousLayer.OutputShape[2] };
                    layer.OutputShape = outputShape;
                    break;
                }

                case nameof(NeuralNetworkLayerType.Concatenate): {
                    AddLayer(new NeuralNetworkConcatenateLayer());
                    int channels = 0;
                    foreach (NeuralNetworkLayer currentLayer in layer.Previous)
                    {
                        channels += currentLayer.OutputShape[2];
                    }
                    int[] outputShape = new int[] { previousLayer.OutputShape[0], previousLayer.OutputShape[1], channels };
                    layer.OutputShape = outputShape;
                    break;
                }

                default:
                    throw new NotImplementedException(className);
                }
                void AddLayer(NeuralNetworkLayer newLayer)
                {
                    newLayer.Name = name;
                    if (IsSequential)
                    {
                        if (Layers.Count > 0)
                        {
                            newLayer.InputShape = previousLayer.OutputShape;
                            previousLayer.Nexts.Add(newLayer);
                            newLayer.Previous = new NeuralNetworkLayer[] { previousLayer };
                        }
                        previousLayer = newLayer;
                    }
                    else
                    {
                        List <NeuralNetworkLayer> previous = new List <NeuralNetworkLayer> ();
                        foreach (JsonValue inboundNode in inboundNodes)
                        {
                            string targetName = inboundNode[0];
                            foreach (NeuralNetworkLayer currentLayer in Layers)
                            {
                                if (currentLayer.Name == targetName)
                                {
                                    currentLayer.Nexts.Add(newLayer);
                                    previous.Add(currentLayer);
                                    previousLayer = currentLayer;
                                }
                            }
                        }
                        if (previous.Count > 0)
                        {
                            newLayer.Previous   = previous.ToArray();
                            newLayer.InputShape = newLayer.Previous[0].OutputShape;
                        }
                    }
                    Layers.Add(newLayer);
                    layer = newLayer;
                }
            }
            if (textWriter != null)
            {
                textWriter.EndObject();
            }
            if (IsSequential)
            {
                OutputLayers = new NeuralNetworkLayer[] { previousLayer };
            }
            else
            {
                JsonArray outputLayers = modelConfig["config"]["output_layers"];
                OutputLayers = new NeuralNetworkLayer[outputLayers.Count];
                for (int i = 0; i < outputLayers.Count; i++)
                {
                    string targetName = outputLayers[i][0];
                    OutputLayers[i] = Layers.Find(layer => layer.Name == targetName);
                }
            }
        }
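
An end-to-end inference sketch tying the pieces together. The NeuralNetworkModel and H5 loader class names and the preprocessing helper are assumptions; Load, Summary and PredictClasses are the methods shown in this listing.

        // Hypothetical end-to-end flow for a Keras HDF5 model.
        var loader = new NeuralNetworkH5Loader("mnist.h5");   // assumed INeuralNetworkLoader over an .h5 file
        var model  = new NeuralNetworkModel();
        model.Load(loader);                                   // builds Layers from model_config and the stored weights
        model.Summary();                                      // optional: print the layer table (example #1)
        float[] input = LoadNormalizedImage("digit.png");     // assumed preprocessing helper
        Console.WriteLine($"Predicted class: {model.PredictClasses(input)}");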
Code example #13
        public override object ForwardPropagation(object inputs)
        {
            float[,,] inputValues = (float[,,])inputs;
            int inputWidth   = inputValues.GetLength(1);
            int inputHeight  = inputValues.GetLength(0);
            int inputChannel = inputValues.GetLength(2);
            int outputWidth  = NeuralNetworkAPI.CalculateOutputLength(inputValues.GetLength(1), Width, StrideX, PaddingType);
            int outputHeight = NeuralNetworkAPI.CalculateOutputLength(inputValues.GetLength(0), Height, StrideY, PaddingType);
            int paddingX;
            int paddingY;

            switch (PaddingType)
            {
            case NeuralNetworkPaddingType.Valid:
                paddingX = 0;
                paddingY = 0;
                break;

            case NeuralNetworkPaddingType.Same:
                paddingX = (Width - 1) / 2;
                paddingY = (Height - 1) / 2;
                break;

            default:
                throw new NotImplementedException(PaddingType.ToString());
            }
            int startX = 0 - paddingX;
            int startY = 0 - paddingY;
            int endX   = inputWidth + paddingX - Width;
            int endY   = inputHeight + paddingY - Height;

            float[,,] outputs = new float[outputHeight, outputWidth, inputChannel];
#if NET40_OR_GREATER
            Parallel.For(0, inputChannel, c => {
                int outputX = 0;
                int outputY = 0;
                for (int y = startY; y <= endY; y += StrideY, outputY++)
                {
                    for (int x = startX; x <= endX; x += StrideX, outputX++)
                    {
                        bool first = true;
                        float max  = 0;
                        int ex     = x + Width;
                        int eY     = y + Height;
                        for (int cy = y; cy < eY; cy++)
                        {
                            for (int cx = x; cx < ex; cx++)
                            {
                                float value;
                                // Reads outside the input bounds count as zero padding.
                                if (cy < 0 || cy >= inputHeight || cx < 0 || cx >= inputWidth)
                                {
                                    value = 0;
                                }
                                else
                                {
                                    value = inputValues[cy, cx, c];
                                }
                                if (first || max < value)
                                {
                                    first = false;
                                    max   = value;
                                }
                            }
                        }
                        outputs[outputY, outputX, c] = max;
                    }
                    outputX = 0;
                }
            });
#else
            for (int c = 0; c < inputChannel; c++)
            {
                int outputX = 0;
                int outputY = 0;
                for (int y = startY; y <= endY; y += StrideY, outputY++)
                {
                    for (int x = startX; x <= endX; x += StrideX, outputX++)
                    {
                        bool  first = true;
                        float max   = 0;
                        int   ex    = x + Width;
                        int   eY    = y + Height;
                        for (int cy = y; cy < eY; cy++)
                        {
                            for (int cx = x; cx < ex; cx++)
                            {
                                float value;
                                // Reads outside the input bounds count as zero padding.
                                if (cy < 0 || cy >= inputHeight || cx < 0 || cx >= inputWidth)
                                {
                                    value = 0;
                                }
                                else
                                {
                                    value = inputValues[cy, cx, c];
                                }
                                if (first || max < value)
                                {
                                    first = false;
                                    max   = value;
                                }
                            }
                        }
                        outputs[outputY, outputX, c] = max;
                    }
                    outputX = 0;
                }
            }
#endif
            return(Outputs = outputs);
        }