Example #1
        private static Layer parse_crop(KeyValuePair[] options, SizeParams parameters)
        {
            int   cropHeight = OptionList.option_find_int(options, "crop_height", 1);
            int   cropWidth  = OptionList.option_find_int(options, "crop_width", 1);
            bool  flip       = OptionList.option_find_int(options, "flip", 0) != 0;
            float angle      = OptionList.option_find_float(options, "angle", 0);
            float saturation = OptionList.option_find_float(options, "saturation", 1);
            float exposure   = OptionList.option_find_float(options, "exposure", 1);

            int batch, h, w, c;

            h     = parameters.H;
            w     = parameters.W;
            c     = parameters.C;
            batch = parameters.Batch;
            if (!(h != 0 && w != 0 && c != 0))
            {
                Utils.Error("Layer before crop Layer must output image.");
            }

            bool noadjust = OptionList.option_find_int_quiet(options, "noadjust", 0) != 0;

            Layer l = Layer.make_crop_layer(batch, h, w, c, cropHeight, cropWidth, flip, angle, saturation, exposure);

            l.Shift    = OptionList.option_find_float(options, "shift", 0);
            l.Noadjust = noadjust;
            return(l);
        }
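
A cfg section that exercises these options might look like the sketch below. The [crop] section name and the key=value syntax are assumptions carried over from upstream Darknet (string_to_layer_type and read_cfg are not shown here), while the key names come straight from the option_find_* calls above; omitted keys fall back to the defaults in the code.

    [crop]
    crop_height=224
    crop_width=224
    flip=1
    angle=15
    saturation=1.5
    exposure=1.5
    shift=0.2
    noadjust=0
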
Example #2
        private static Layer parse_normalization(KeyValuePair[] options, SizeParams parameters)
        {
            float alpha = OptionList.option_find_float(options, "alpha", .0001f);
            float beta  = OptionList.option_find_float(options, "beta", .75f);
            float kappa = OptionList.option_find_float(options, "kappa", 1);
            int   size  = OptionList.option_find_int(options, "size", 5);

            return(Layer.make_normalization_layer(parameters.Batch, parameters.W, parameters.H, parameters.C, size, alpha, beta, kappa));
        }
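
An illustrative section for this parser, assuming the port keeps upstream Darknet's [lrn]/[normalization] section names:

    [lrn]
    size=5
    alpha=0.0001
    beta=0.75
    kappa=1
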
Example #3
        private static Layer parse_connected(KeyValuePair[] options, SizeParams parameters)
        {
            int        output         = OptionList.option_find_int(options, "output", 1);
            string     activationS    = OptionList.option_find_str(options, "activation", "logistic");
            Activation activation     = ActivationsHelper.Get_activation(activationS);
            bool       batchNormalize = OptionList.option_find_int_quiet(options, "batch_normalize", 0) != 0;

            return(Layer.make_connected_layer(parameters.Batch, parameters.Inputs, output, activation, batchNormalize));
        }
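
A matching section might look like the following; the activation name "leaky" is an assumption about what ActivationsHelper.Get_activation accepts (the code's own default is "logistic"):

    [connected]
    output=256
    activation=leaky
    batch_normalize=1
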
Example #4
        private static Layer parse_gru(KeyValuePair[] options, SizeParams parameters)
        {
            int  output         = OptionList.option_find_int(options, "output", 1);
            bool batchNormalize = OptionList.option_find_int_quiet(options, "batch_normalize", 0) != 0;

            Layer l = Layer.make_gru_layer(parameters.Batch, parameters.Inputs, output, parameters.TimeSteps, batchNormalize);

            return(l);
        }
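
Only two options are read here, so an illustrative section is short; the time-step count comes from the network-level parameters (parameters.TimeSteps), not from this block:

    [gru]
    output=1024
    batch_normalize=1
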
Example #5
        private static Layer parse_dropout(KeyValuePair[] options, SizeParams parameters)
        {
            float probability = OptionList.option_find_float(options, "probability", .5f);
            Layer layer       = Layer.make_dropout_layer(parameters.Batch, parameters.Inputs, probability);

            layer.OutW = parameters.W;
            layer.OutH = parameters.H;
            layer.OutC = parameters.C;
            return(layer);
        }
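
Dropout keeps the incoming W/H/C unchanged, so the only cfg knob is the drop probability. An illustrative section:

    [dropout]
    probability=0.5
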
Example #6
        private static Layer parse_cost(KeyValuePair[] options, SizeParams parameters)
        {
            string   typeS = OptionList.option_find_str(options, "type", "sse");
            CostType type  = (CostType)Enum.Parse(typeof(CostType), typeS, true); // ignore case so lowercase cfg values like "sse" match the enum names
            float    scale = OptionList.option_find_float_quiet(options, "scale", 1);
            Layer    layer = Layer.make_cost_layer(parameters.Batch, parameters.Inputs, type, scale);

            layer.Ratio = OptionList.option_find_float_quiet(options, "ratio", 0);
            return(layer);
        }
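
An illustrative section; note that the type string is fed to Enum.Parse, so it has to name a CostType member ("sse" is the default used in the code above):

    [cost]
    type=sse
    scale=1
    ratio=0
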
Example #7
        private static Layer parse_region(KeyValuePair[] options, SizeParams parameters)
        {
            int coords  = OptionList.option_find_int(options, "coords", 4);
            int classes = OptionList.option_find_int(options, "classes", 20);
            int num     = OptionList.option_find_int(options, "num", 1);

            Layer l = Layer.make_region_layer(parameters.Batch, parameters.W, parameters.H, num, classes, coords);

            l.Log  = OptionList.option_find_int_quiet(options, "log", 0);
            l.Sqrt = OptionList.option_find_int_quiet(options, "sqrt", 0) != 0;

            l.Softmax  = OptionList.option_find_int(options, "softmax", 0) != 0;
            l.MaxBoxes = OptionList.option_find_int_quiet(options, "max", 30);
            l.Jitter   = OptionList.option_find_float(options, "jitter", .2f);
            l.Rescore  = OptionList.option_find_int_quiet(options, "rescore", 0) != 0;

            l.Thresh   = OptionList.option_find_float(options, "thresh", .5f);
            l.Classfix = OptionList.option_find_int_quiet(options, "classfix", 0);
            l.Absolute = OptionList.option_find_int_quiet(options, "absolute", 0);
            l.Random   = OptionList.option_find_int_quiet(options, "random", 0) != 0;

            l.CoordScale    = OptionList.option_find_float(options, "coord_scale", 1);
            l.ObjectScale   = OptionList.option_find_float(options, "object_scale", 1);
            l.NoobjectScale = OptionList.option_find_float(options, "noobject_scale", 1);
            l.ClassScale    = OptionList.option_find_float(options, "class_scale", 1);
            l.BiasMatch     = OptionList.option_find_int_quiet(options, "bias_match", 0) != 0;

            string treeFile = OptionList.option_find_str(options, "tree", "");

            if (!string.IsNullOrEmpty(treeFile))
            {
                l.SoftmaxTree = new Tree(treeFile);
            }
            string mapFile = OptionList.option_find_str(options, "map", "");

            if (!string.IsNullOrEmpty(mapFile))
            {
                l.Map = Utils.read_map(mapFile);
            }

            string a = OptionList.option_find_str(options, "anchors", null);

            if (!string.IsNullOrEmpty(a))
            {
                var lines = a.Split(',');
                for (int i = 0; i < lines.Length; ++i)
                {
                    l.BiasesComplete[l.BiasesIndex + i] = float.Parse(lines[i], System.Globalization.CultureInfo.InvariantCulture); // anchor values always use '.' as the decimal separator
                }
            }
            return(l);
        }
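
A region section touching the more common options might look like this (all values illustrative). The anchors string is split on ',' and copied into BiasesComplete, one (width,height) pair per anchor box:

    [region]
    anchors=1.08,1.19,3.42,4.41,6.63,11.38,9.42,5.11,16.62,10.52
    classes=20
    coords=4
    num=5
    softmax=1
    jitter=0.2
    rescore=1
    object_scale=5
    noobject_scale=1
    class_scale=1
    coord_scale=1
    bias_match=1
    thresh=0.6
    random=1
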
Example #8
        private static Layer parse_route(KeyValuePair[] options, SizeParams parameters, Network net)
        {
            string l = OptionList.option_find(options, "layers");

            if (string.IsNullOrEmpty(l))
            {
                Utils.Error("Route Layer must specify input layers");
            }
            var lines = l.Split(',');
            int n     = lines.Length;   // one route input per comma-separated index


            int[] layers = new int[n];
            int[] sizes  = new int[n];

            for (var i = 0; i < lines.Length; ++i)
            {
                int index = int.Parse(lines[i]);
                if (index < 0)
                {
                    index = parameters.Index + index;
                }
                layers[i] = index;
                sizes[i]  = net.Layers[index].Outputs;
            }

            int batch = parameters.Batch;

            Layer layer = Layer.make_route_layer(batch, n, layers, sizes);

            var first = net.Layers[layers[0]];

            layer.OutW = first.OutW;
            layer.OutH = first.OutH;
            layer.OutC = first.OutC;
            for (var i = 1; i < n; ++i)
            {
                int index = layers[i];
                var next  = net.Layers[index];
                if (next.OutW == first.OutW && next.OutH == first.OutH)
                {
                    layer.OutC += next.OutC;
                }
                else
                {
                    layer.OutH = layer.OutW = layer.OutC = 0;
                }
            }

            return(layer);
        }
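
Negative indices in layers are resolved relative to the current layer (parameters.Index + index), so a section that concatenates the previous layer with the layer four steps back looks like:

    [route]
    layers=-1,-4
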
Example #9
        private static Layer parse_crnn(KeyValuePair[] options, SizeParams parameters)
        {
            int        outputFilters  = OptionList.option_find_int(options, "output_filters", 1);
            int        hiddenFilters  = OptionList.option_find_int(options, "hidden_filters", 1);
            string     activationS    = OptionList.option_find_str(options, "activation", "logistic");
            Activation activation     = ActivationsHelper.Get_activation(activationS);
            bool       batchNormalize = OptionList.option_find_int_quiet(options, "batch_normalize", 0) != 0;

            Layer l = Layer.make_crnn_layer(parameters.Batch, parameters.W, parameters.H, parameters.C, hiddenFilters, outputFilters, parameters.TimeSteps, activation, batchNormalize);

            l.Shortcut = OptionList.option_find_int_quiet(options, "shortcut", 0) != 0;

            return(l);
        }
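
An illustrative section (the activation name is again an assumption; the code defaults to "logistic"):

    [crnn]
    output_filters=256
    hidden_filters=256
    activation=leaky
    batch_normalize=1
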
Example #10
        private static Layer parse_softmax(KeyValuePair[] options, SizeParams parameters)
        {
            int   groups = OptionList.option_find_int_quiet(options, "groups", 1);
            Layer layer  = Layer.make_softmax_layer(parameters.Batch, parameters.Inputs, groups);

            layer.Temperature = OptionList.option_find_float_quiet(options, "temperature", 1);
            string treeFile = OptionList.option_find_str(options, "tree", "");

            if (!string.IsNullOrEmpty(treeFile))
            {
                layer.SoftmaxTree = new Tree(treeFile);
            }
            return(layer);
        }
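
An illustrative section; the tree key is optional and, when present, should point at a softmax-tree file readable by the Tree constructor (the path below is a placeholder):

    [softmax]
    groups=1
    temperature=1
    tree=data/softmax.tree
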
Example #11
        private static Layer parse_avgpool(KeyValuePair[] options, SizeParams parameters)
        {
            int batch, w, h, c;

            w     = parameters.W;
            h     = parameters.H;
            c     = parameters.C;
            batch = parameters.Batch;
            if (!(h != 0 && w != 0 && c != 0))
            {
                Utils.Error("Layer before avgpool Layer must output image.");
            }

            return(Layer.make_avgpool_layer(batch, w, h, c));
        }
Example #12
        private static Layer parse_activation(KeyValuePair[] options, SizeParams parameters)
        {
            string     activationS = OptionList.option_find_str(options, "activation", "linear");
            Activation activation  = ActivationsHelper.Get_activation(activationS);

            Layer l = Layer.make_activation_layer(parameters.Batch, parameters.Inputs, activation);

            l.OutH = parameters.H;
            l.OutW = parameters.W;
            l.OutC = parameters.C;
            l.H    = parameters.H;
            l.W    = parameters.W;
            l.C    = parameters.C;

            return(l);
        }
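
The only option read here is the activation name, which defaults to "linear":

    [activation]
    activation=leaky
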
Example #13
        private static Layer parse_reorg(KeyValuePair[] options, SizeParams parameters)
        {
            int  stride  = OptionList.option_find_int(options, "stride", 1);
            bool reverse = OptionList.option_find_int_quiet(options, "reverse", 0) != 0;

            int batch, h, w, c;

            h     = parameters.H;
            w     = parameters.W;
            c     = parameters.C;
            batch = parameters.Batch;
            if (!(h != 0 && w != 0 && c != 0))
            {
                Utils.Error("Layer before reorg Layer must output image.");
            }

            return(Layer.make_reorg_layer(batch, w, h, c, stride, reverse));
        }
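
An illustrative section:

    [reorg]
    stride=2
    reverse=0
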
Example #14
        private static Layer parse_maxpool(KeyValuePair[] options, SizeParams parameters)
        {
            int stride  = OptionList.option_find_int(options, "stride", 1);
            int size    = OptionList.option_find_int(options, "size", stride);
            int padding = OptionList.option_find_int_quiet(options, "padding", (size - 1) / 2);

            int batch, h, w, c;

            h     = parameters.H;
            w     = parameters.W;
            c     = parameters.C;
            batch = parameters.Batch;
            if (!(h != 0 && w != 0 && c != 0))
            {
                Utils.Error("Layer before maxpool Layer must output image.");
            }

            return(Layer.make_maxpool_layer(batch, h, w, c, size, stride, padding));
        }
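
Note that size defaults to stride and padding defaults to (size - 1) / 2, so a typical section only needs:

    [maxpool]
    size=2
    stride=2
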
Example #15
        private static Layer parse_convolutional(KeyValuePair[] options, SizeParams parameters)
        {
            int n       = OptionList.option_find_int(options, "filters", 1);
            int size    = OptionList.option_find_int(options, "size", 1);
            int stride  = OptionList.option_find_int(options, "stride", 1);
            int pad     = OptionList.option_find_int_quiet(options, "pad", 0);
            int padding = OptionList.option_find_int_quiet(options, "padding", 0);

            if (pad != 0)
            {
                padding = size / 2;
            }

            string     activationS = OptionList.option_find_str(options, "activation", "logistic");
            Activation activation  = ActivationsHelper.Get_activation(activationS);

            int batch, h, w, c;

            h     = parameters.H;
            w     = parameters.W;
            c     = parameters.C;
            batch = parameters.Batch;
            if (!(h != 0 && w != 0 && c != 0))
            {
                Utils.Error("Layer before convolutional Layer must output image.");
            }
            bool batchNormalize = OptionList.option_find_int_quiet(options, "batch_normalize", 0) != 0;
            bool binary         = OptionList.option_find_int_quiet(options, "binary", 0) != 0;
            bool xnor           = OptionList.option_find_int_quiet(options, "xnor", 0) != 0;

            Layer layer = Layer.make_convolutional_layer(batch, h, w, c, n, size, stride, padding, activation, batchNormalize, binary, xnor, parameters.Net.Adam);

            layer.Flipped = OptionList.option_find_int_quiet(options, "flipped", 0);
            layer.Dot     = OptionList.option_find_float_quiet(options, "dot", 0);
            if (parameters.Net.Adam)
            {
                layer.B1  = parameters.Net.B1;
                layer.B2  = parameters.Net.B2;
                layer.Eps = parameters.Net.Eps;
            }

            return(layer);
        }
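
A representative section (values illustrative); when pad is non-zero the code overrides any explicit padding value with size / 2:

    [convolutional]
    batch_normalize=1
    filters=64
    size=3
    stride=1
    pad=1
    activation=leaky
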
Example #16
        private static Layer parse_shortcut(KeyValuePair[] options, SizeParams parameters, Network net)
        {
            string l     = OptionList.option_find(options, "from");
            int    index = int.Parse(l);

            if (index < 0)
            {
                index = parameters.Index + index;
            }

            int   batch = parameters.Batch;
            Layer from  = net.Layers[index];

            Layer s = Layer.make_shortcut_layer(batch, index, parameters.W, parameters.H, parameters.C, from.OutW, from.OutH, from.OutC);

            string     activationS = OptionList.option_find_str(options, "activation", "linear");
            Activation activation  = ActivationsHelper.Get_activation(activationS);

            s.Activation = activation;
            return(s);
        }
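
As with route layers, from may be negative to refer back relative to the current layer, e.g.:

    [shortcut]
    from=-3
    activation=linear
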
Example #17
        private static Layer parse_detection(KeyValuePair[] options, SizeParams parameters)
        {
            int   coords  = OptionList.option_find_int(options, "coords", 1);
            int   classes = OptionList.option_find_int(options, "classes", 1);
            bool  rescore = OptionList.option_find_int(options, "rescore", 0) != 0;
            int   num     = OptionList.option_find_int(options, "num", 1);
            int   side    = OptionList.option_find_int(options, "side", 7);
            Layer layer   = Layer.make_detection_layer(parameters.Batch, parameters.Inputs, num, side, classes, coords, rescore);

            layer.Softmax = OptionList.option_find_int(options, "softmax", 0) != 0;
            layer.Sqrt    = OptionList.option_find_int(options, "sqrt", 0) != 0;

            layer.MaxBoxes      = OptionList.option_find_int_quiet(options, "max", 30);
            layer.CoordScale    = OptionList.option_find_float(options, "coord_scale", 1);
            layer.Forced        = OptionList.option_find_int(options, "forced", 0);
            layer.ObjectScale   = OptionList.option_find_float(options, "object_scale", 1);
            layer.NoobjectScale = OptionList.option_find_float(options, "noobject_scale", 1);
            layer.ClassScale    = OptionList.option_find_float(options, "class_scale", 1);
            layer.Jitter        = OptionList.option_find_float(options, "jitter", .2f);
            layer.Random        = OptionList.option_find_int_quiet(options, "random", 0) != 0;
            layer.Reorg         = OptionList.option_find_int_quiet(options, "reorg", 0);
            return(layer);
        }
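
An illustrative section in the spirit of the original YOLO detection head (all values illustrative):

    [detection]
    classes=20
    coords=4
    rescore=1
    side=7
    num=3
    softmax=0
    sqrt=1
    jitter=0.2
    object_scale=1
    noobject_scale=0.5
    class_scale=1
    coord_scale=5
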
Example #18
        private static Layer parse_local(KeyValuePair[] options, SizeParams parameters)
        {
            int        n           = OptionList.option_find_int(options, "filters", 1);
            int        size        = OptionList.option_find_int(options, "size", 1);
            int        stride      = OptionList.option_find_int(options, "stride", 1);
            int        pad         = OptionList.option_find_int(options, "pad", 0);
            string     activationS = OptionList.option_find_str(options, "activation", "logistic");
            Activation activation  = ActivationsHelper.Get_activation(activationS);

            int batch, h, w, c;

            h     = parameters.H;
            w     = parameters.W;
            c     = parameters.C;
            batch = parameters.Batch;
            if (!(h != 0 && w != 0 && c != 0))
            {
                Utils.Error("Layer before local Layer must output image.");
            }

            Layer layer = new Layer(batch, h, w, c, n, size, stride, pad, activation);

            return(layer);
        }
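
An illustrative section:

    [local]
    filters=256
    size=3
    stride=1
    pad=1
    activation=leaky
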
Example #19
        public static Network parse_network_cfg(string filename)
        {
            Section[] sections = read_cfg(filename);
            if (sections.Length < 1)
            {
                Utils.Error("Config file has no Sections");
            }
            var        n          = sections[0];
            Network    net        = new Network(sections.Length - 1);
            SizeParams parameters = new SizeParams();

            var s       = new Section(n);
            var options = s.Options;

            if (!is_network(s))
            {
                Utils.Error("First Section must be [net] or [Network]");
            }
            parse_net_options(options, net);

            parameters.H         = net.H;
            parameters.W         = net.W;
            parameters.C         = net.C;
            parameters.Inputs    = net.Inputs;
            parameters.Batch     = net.Batch;
            parameters.TimeSteps = net.TimeSteps;
            parameters.Net       = net;

            ulong workspaceSize = 0;
            var   index         = 1;
            int   count         = 0;

            Console.Error.Write("Layer     filters    size              input                output\n");
            while (index < sections.Length)
            {
                n = sections[index];
                index++;
                parameters.Index = count;
                Console.Error.Write($"{count:5} ");
                s       = new Section(n);
                options = s.Options;
                Layer     l  = new Layer();
                LayerType lt = string_to_layer_type(s.Type);
                if (lt == LayerType.Convolutional)
                {
                    l = parse_convolutional(options, parameters);
                }
                else if (lt == LayerType.Local)
                {
                    l = parse_local(options, parameters);
                }
                else if (lt == LayerType.Active) // activation layers; enum member name assumed (average pooling is handled by the Avgpool branch below)
                {
                    l = parse_activation(options, parameters);
                }
                else if (lt == LayerType.Rnn)
                {
                    l = parse_rnn(options, parameters);
                }
                else if (lt == LayerType.Gru)
                {
                    l = parse_gru(options, parameters);
                }
                else if (lt == LayerType.Crnn)
                {
                    l = parse_crnn(options, parameters);
                }
                else if (lt == LayerType.Connected)
                {
                    l = parse_connected(options, parameters);
                }
                else if (lt == LayerType.Crop)
                {
                    l = parse_crop(options, parameters);
                }
                else if (lt == LayerType.Cost)
                {
                    l = parse_cost(options, parameters);
                }
                else if (lt == LayerType.Region)
                {
                    l = parse_region(options, parameters);
                }
                else if (lt == LayerType.Detection)
                {
                    l = parse_detection(options, parameters);
                }
                else if (lt == LayerType.Softmax)
                {
                    l             = parse_softmax(options, parameters);
                    net.Hierarchy = l.SoftmaxTree;
                }
                else if (lt == LayerType.Normalization)
                {
                    l = parse_normalization(options, parameters);
                }
                else if (lt == LayerType.Batchnorm)
                {
                    l = parse_batchnorm(options, parameters);
                }
                else if (lt == LayerType.Maxpool)
                {
                    l = parse_maxpool(options, parameters);
                }
                else if (lt == LayerType.Reorg)
                {
                    l = parse_reorg(options, parameters);
                }
                else if (lt == LayerType.Avgpool)
                {
                    l = parse_avgpool(options, parameters);
                }
                else if (lt == LayerType.Route)
                {
                    l = parse_route(options, parameters, net);
                }
                else if (lt == LayerType.Shortcut)
                {
                    l = parse_shortcut(options, parameters, net);
                }
                else if (lt == LayerType.Dropout)
                {
                    l           = parse_dropout(options, parameters);
                    l.Output    = net.Layers[count - 1].Output;
                    l.Delta     = net.Layers[count - 1].Delta;
                    l.OutputGpu = net.Layers[count - 1].OutputGpu;
                    l.DeltaGpu  = net.Layers[count - 1].DeltaGpu;
                }
                else
                {
                    Console.Error.Write($"LayerType not recognized: {s.Type}\n");
                }
                l.Dontload       = OptionList.option_find_int_quiet(options, "dontload", 0) != 0;
                l.Dontloadscales = OptionList.option_find_int_quiet(options, "dontloadscales", 0) != 0;
                OptionList.option_unused(options);
                net.Layers[count] = l;
                if (l.WorkspaceSize > workspaceSize)
                {
                    workspaceSize = l.WorkspaceSize;
                }
                ++count;
                if (index < sections.Length) // another section follows, so pass this layer's output dims forward
                {
                    parameters.H      = l.OutH;
                    parameters.W      = l.OutW;
                    parameters.C      = l.OutC;
                    parameters.Inputs = l.Outputs;
                }
            }
            net.Outputs = Network.get_network_output_size(net);
            net.Output  = Network.get_network_output(net);
            if (workspaceSize != 0)
            {
                if (CudaUtils.UseGpu)
                {
                    net.Workspace = new float[(workspaceSize - 1) / sizeof(float) + 1];
                }
                else
                {
                    net.Workspace = new float[1];
                }
            }
            return(net);
        }
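
Putting the pieces together, a cfg file for this parser starts with a [net] (or [Network]) section and is followed by one section per layer. A minimal skeleton might look like the following; the keys under [net] are read by parse_net_options, which is not shown here, so those names (batch, height, width, channels) are assumptions carried over from upstream Darknet:

    [net]
    batch=1
    height=416
    width=416
    channels=3

    [convolutional]
    filters=16
    size=3
    stride=1
    pad=1
    activation=leaky

    [maxpool]
    size=2
    stride=2

    [connected]
    output=10
    activation=linear

    [softmax]
    groups=1

    [cost]
    type=sse
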
Example #20
        private static Layer parse_batchnorm(KeyValuePair[] options, SizeParams parameters)
        {
            return(Layer.make_batchnorm_layer(parameters.Batch, parameters.W, parameters.H, parameters.C));
        }