/// <summary>
/// Builds a fully connected layer from a [connected] cfg section.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state (batch, input count) from the previous layer.</param>
/// <returns>The constructed connected layer.</returns>
private static Layer parse_connected(KeyValuePair[] options, SizeParams parameters)
{
    // Number of output neurons; cfg key "output", defaults to 1.
    int outputs = OptionList.option_find_int(options, "output", 1);
    string activationName = OptionList.option_find_str(options, "activation", "logistic");
    Activation activation = ActivationsHelper.Get_activation(activationName);
    bool batchNormalize = OptionList.option_find_int_quiet(options, "batch_normalize", 0) != 0;
    return Layer.make_connected_layer(parameters.Batch, parameters.Inputs, outputs, activation, batchNormalize);
}
/// <summary>
/// Builds a cost (loss) layer from a [cost] cfg section.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state from the previous layer.</param>
/// <returns>The constructed cost layer.</returns>
private static Layer parse_cost(KeyValuePair[] options, SizeParams parameters)
{
    string costTypeName = OptionList.option_find_str(options, "type", "sse");
    // Cfg value must match a CostType member name exactly (case-sensitive).
    CostType costType = (CostType)Enum.Parse(typeof(CostType), costTypeName);
    float scale = OptionList.option_find_float_quiet(options, "scale", 1);
    Layer costLayer = Layer.make_cost_layer(parameters.Batch, parameters.Inputs, costType, scale);
    costLayer.Ratio = OptionList.option_find_float_quiet(options, "ratio", 0);
    return costLayer;
}
/// <summary>
/// Builds a region (YOLO detection) layer from a [region] cfg section and
/// applies all its tuning options, optional softmax tree, class map and anchors.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state (batch, width, height) from the previous layer.</param>
/// <returns>The configured region layer.</returns>
private static Layer parse_region(KeyValuePair[] options, SizeParams parameters)
{
    int coords = OptionList.option_find_int(options, "coords", 4);
    int classes = OptionList.option_find_int(options, "classes", 20);
    int num = OptionList.option_find_int(options, "num", 1);

    Layer layer = Layer.make_region_layer(parameters.Batch, parameters.W, parameters.H, num, classes, coords);

    layer.Log = OptionList.option_find_int_quiet(options, "log", 0);
    layer.Sqrt = OptionList.option_find_int_quiet(options, "sqrt", 0) != 0;
    layer.Softmax = OptionList.option_find_int(options, "softmax", 0) != 0;
    layer.MaxBoxes = OptionList.option_find_int_quiet(options, "max", 30);
    layer.Jitter = OptionList.option_find_float(options, "jitter", .2f);
    layer.Rescore = OptionList.option_find_int_quiet(options, "rescore", 0) != 0;
    layer.Thresh = OptionList.option_find_float(options, "thresh", .5f);
    layer.Classfix = OptionList.option_find_int_quiet(options, "classfix", 0);
    layer.Absolute = OptionList.option_find_int_quiet(options, "absolute", 0);
    layer.Random = OptionList.option_find_int_quiet(options, "random", 0) != 0;
    layer.CoordScale = OptionList.option_find_float(options, "coord_scale", 1);
    layer.ObjectScale = OptionList.option_find_float(options, "object_scale", 1);
    layer.NoobjectScale = OptionList.option_find_float(options, "noobject_scale", 1);
    layer.ClassScale = OptionList.option_find_float(options, "class_scale", 1);
    layer.BiasMatch = OptionList.option_find_int_quiet(options, "bias_match", 0) != 0;

    // Optional hierarchical-softmax tree file.
    string treeFile = OptionList.option_find_str(options, "tree", "");
    if (!string.IsNullOrEmpty(treeFile))
    {
        layer.SoftmaxTree = new Tree(treeFile);
    }

    // Optional class-index remapping file.
    string mapFile = OptionList.option_find_str(options, "map", "");
    if (!string.IsNullOrEmpty(mapFile))
    {
        layer.Map = Utils.read_map(mapFile);
    }

    // Optional comma-separated anchor priors, copied into the bias buffer.
    string anchorList = OptionList.option_find_str(options, "anchors", null);
    if (!string.IsNullOrEmpty(anchorList))
    {
        var anchorTokens = anchorList.Split(',');
        for (int idx = 0; idx < anchorTokens.Length; ++idx)
        {
            layer.BiasesComplete[layer.BiasesIndex + idx] = float.Parse(anchorTokens[idx]);
        }
    }

    return layer;
}
/// <summary>
/// Builds a softmax layer from a [softmax] cfg section.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state from the previous layer.</param>
/// <returns>The constructed softmax layer.</returns>
private static Layer parse_softmax(KeyValuePair[] options, SizeParams parameters)
{
    int groups = OptionList.option_find_int_quiet(options, "groups", 1);
    Layer softmaxLayer = Layer.make_softmax_layer(parameters.Batch, parameters.Inputs, groups);
    softmaxLayer.Temperature = OptionList.option_find_float_quiet(options, "temperature", 1);

    // Optional hierarchical-softmax tree file.
    string treeFile = OptionList.option_find_str(options, "tree", "");
    if (!string.IsNullOrEmpty(treeFile))
    {
        softmaxLayer.SoftmaxTree = new Tree(treeFile);
    }

    return softmaxLayer;
}
/// <summary>
/// Builds a convolutional RNN layer from a [crnn] cfg section.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state (batch, W, H, C, time steps) from the previous layer.</param>
/// <returns>The constructed CRNN layer.</returns>
private static Layer parse_crnn(KeyValuePair[] options, SizeParams parameters)
{
    int outputFilters = OptionList.option_find_int(options, "output_filters", 1);
    int hiddenFilters = OptionList.option_find_int(options, "hidden_filters", 1);
    string activationName = OptionList.option_find_str(options, "activation", "logistic");
    Activation activation = ActivationsHelper.Get_activation(activationName);
    bool batchNormalize = OptionList.option_find_int_quiet(options, "batch_normalize", 0) != 0;

    Layer crnnLayer = Layer.make_crnn_layer(
        parameters.Batch, parameters.W, parameters.H, parameters.C,
        hiddenFilters, outputFilters, parameters.TimeSteps, activation, batchNormalize);
    crnnLayer.Shortcut = OptionList.option_find_int_quiet(options, "shortcut", 0) != 0;

    return crnnLayer;
}
/// <summary>
/// Builds a pass-through activation layer from an [activation] cfg section.
/// Input and output dimensions are copied unchanged from the previous layer.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state from the previous layer.</param>
/// <returns>The constructed activation layer.</returns>
private static Layer parse_activation(KeyValuePair[] options, SizeParams parameters)
{
    string activationName = OptionList.option_find_str(options, "activation", "linear");
    Activation activation = ActivationsHelper.Get_activation(activationName);

    Layer activationLayer = Layer.make_activation_layer(parameters.Batch, parameters.Inputs, activation);

    // Dimensions pass straight through: out == in.
    activationLayer.OutH = parameters.H;
    activationLayer.OutW = parameters.W;
    activationLayer.OutC = parameters.C;
    activationLayer.H = parameters.H;
    activationLayer.W = parameters.W;
    activationLayer.C = parameters.C;

    return activationLayer;
}
/// <summary>
/// Builds a convolutional layer from a [convolutional] cfg section.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state (batch, W, H, C, network) from the previous layer.</param>
/// <returns>The constructed convolutional layer.</returns>
private static Layer parse_convolutional(KeyValuePair[] options, SizeParams parameters)
{
    int filters = OptionList.option_find_int(options, "filters", 1);
    int size = OptionList.option_find_int(options, "size", 1);
    int stride = OptionList.option_find_int(options, "stride", 1);
    int pad = OptionList.option_find_int_quiet(options, "pad", 0);
    int padding = OptionList.option_find_int_quiet(options, "padding", 0);
    // "pad=1" means "use same-style padding of size/2", overriding any explicit "padding".
    if (pad != 0)
    {
        padding = size / 2;
    }

    string activationName = OptionList.option_find_str(options, "activation", "logistic");
    Activation activation = ActivationsHelper.Get_activation(activationName);

    int h = parameters.H;
    int w = parameters.W;
    int c = parameters.C;
    int batch = parameters.Batch;
    // Convolution needs spatial input; a zero dimension means the previous layer was not image-shaped.
    if (!(h != 0 && w != 0 && c != 0))
    {
        Utils.Error("Layer before convolutional Layer must output image.");
    }

    bool batchNormalize = OptionList.option_find_int_quiet(options, "batch_normalize", 0) != 0;
    bool binary = OptionList.option_find_int_quiet(options, "binary", 0) != 0;
    bool xnor = OptionList.option_find_int_quiet(options, "xnor", 0) != 0;

    Layer convLayer = Layer.make_convolutional_layer(
        batch, h, w, c, filters, size, stride, padding,
        activation, batchNormalize, binary, xnor, parameters.Net.Adam);
    convLayer.Flipped = OptionList.option_find_int_quiet(options, "flipped", 0);
    convLayer.Dot = OptionList.option_find_float_quiet(options, "dot", 0);

    // Propagate Adam hyper-parameters from the network when Adam is enabled.
    if (parameters.Net.Adam)
    {
        convLayer.B1 = parameters.Net.B1;
        convLayer.B2 = parameters.Net.B2;
        convLayer.Eps = parameters.Net.Eps;
    }

    return convLayer;
}
/// <summary>
/// Builds a shortcut (residual) layer from a [shortcut] cfg section.
/// The "from" option is an absolute layer index, or a negative offset
/// relative to the current layer index.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state, including the current layer index.</param>
/// <param name="net">The network built so far, used to resolve the source layer.</param>
/// <returns>The constructed shortcut layer.</returns>
private static Layer parse_shortcut(KeyValuePair[] options, SizeParams parameters, Network net)
{
    string fromValue = OptionList.option_find(options, "from");
    int sourceIndex = int.Parse(fromValue);
    // Negative values are offsets from the current layer.
    if (sourceIndex < 0)
    {
        sourceIndex = parameters.Index + sourceIndex;
    }

    Layer source = net.Layers[sourceIndex];
    Layer shortcut = Layer.make_shortcut_layer(
        parameters.Batch, sourceIndex,
        parameters.W, parameters.H, parameters.C,
        source.OutW, source.OutH, source.OutC);

    string activationName = OptionList.option_find_str(options, "activation", "linear");
    shortcut.Activation = ActivationsHelper.Get_activation(activationName);
    return shortcut;
}
/// <summary>
/// Builds a locally connected layer from a [local] cfg section.
/// </summary>
/// <param name="options">Key/value pairs from the section.</param>
/// <param name="parameters">Running size state (batch, W, H, C) from the previous layer.</param>
/// <returns>The constructed local layer.</returns>
private static Layer parse_local(KeyValuePair[] options, SizeParams parameters)
{
    int filters = OptionList.option_find_int(options, "filters", 1);
    int size = OptionList.option_find_int(options, "size", 1);
    int stride = OptionList.option_find_int(options, "stride", 1);
    int pad = OptionList.option_find_int(options, "pad", 0);
    string activationName = OptionList.option_find_str(options, "activation", "logistic");
    Activation activation = ActivationsHelper.Get_activation(activationName);

    int h = parameters.H;
    int w = parameters.W;
    int c = parameters.C;
    int batch = parameters.Batch;
    // Local layers need spatial input, just like convolutions.
    if (!(h != 0 && w != 0 && c != 0))
    {
        Utils.Error("Layer before local Layer must output image.");
    }

    return new Layer(batch, h, w, c, filters, size, stride, pad, activation);
}
/// <summary>
/// Reads the global [net] section into <paramref name="net"/>: batch sizing,
/// optimizer settings, input dimensions, augmentation ranges and the
/// learning-rate policy with its policy-specific parameters.
/// </summary>
/// <param name="options">Key/value pairs from the [net] section.</param>
/// <param name="net">The network object to populate (mutated in place).</param>
private static void parse_net_options(KeyValuePair[] options, Network net)
{
    net.Batch = OptionList.option_find_int(options, "batch", 1);
    net.LearningRate = OptionList.option_find_float(options, "learning_rate", .001f);
    net.Momentum = OptionList.option_find_float(options, "momentum", .9f);
    net.Decay = OptionList.option_find_float(options, "decay", .0001f);
    int subdivs = OptionList.option_find_int(options, "subdivisions", 1);
    net.TimeSteps = OptionList.option_find_int_quiet(options, "time_steps", 1);
    // Effective batch is the cfg batch divided across subdivisions, then
    // multiplied by time steps for recurrent networks.
    net.Batch /= subdivs;
    net.Batch *= net.TimeSteps;
    net.Subdivisions = subdivs;

    net.Adam = OptionList.option_find_int_quiet(options, "adam", 0) != 0;
    if (net.Adam)
    {
        net.B1 = OptionList.option_find_float(options, "B1", .9f);
        net.B2 = OptionList.option_find_float(options, "B2", .999f);
        net.Eps = OptionList.option_find_float(options, "eps", .000001f);
    }

    net.H = OptionList.option_find_int_quiet(options, "height", 0);
    net.W = OptionList.option_find_int_quiet(options, "width", 0);
    net.C = OptionList.option_find_int_quiet(options, "channels", 0);
    net.Inputs = OptionList.option_find_int_quiet(options, "inputs", net.H * net.W * net.C);
    net.MaxCrop = OptionList.option_find_int_quiet(options, "max_crop", net.W * 2);
    net.MinCrop = OptionList.option_find_int_quiet(options, "min_crop", net.W);

    // Data-augmentation ranges.
    net.Angle = OptionList.option_find_float_quiet(options, "angle", 0);
    net.Aspect = OptionList.option_find_float_quiet(options, "aspect", 1);
    net.Saturation = OptionList.option_find_float_quiet(options, "saturation", 1);
    net.Exposure = OptionList.option_find_float_quiet(options, "exposure", 1);
    net.Hue = OptionList.option_find_float_quiet(options, "hue", 0);

    // Either explicit "inputs" or full HxWxC dimensions must be provided.
    if (net.Inputs == 0 && !(net.H != 0 && net.W != 0 && net.C != 0))
    {
        Utils.Error("No input parameters supplied");
    }

    string policyS = OptionList.option_find_str(options, "policy", "constant");
    net.Policy = get_policy(policyS);
    net.BurnIn = OptionList.option_find_int_quiet(options, "burn_in", 0);
    if (net.Policy == LearningRatePolicy.Step)
    {
        net.Step = OptionList.option_find_int(options, "step", 1);
        net.Scale = OptionList.option_find_float(options, "scale", 1);
    }
    else if (net.Policy == LearningRatePolicy.Steps)
    {
        string l = OptionList.option_find(options, "steps");
        string p = OptionList.option_find(options, "scales");
        if (string.IsNullOrEmpty(l) || string.IsNullOrEmpty(p))
        {
            Utils.Error("STEPS policy must have steps and scales in cfg file");
        }

        // BUG FIX: scales were previously parsed from the *steps* string, so
        // every scale silently equaled its step value and the "scales" option
        // was ignored. Parse each list from its own string.
        var stepTokens = l.Split(',');
        var scaleTokens = p.Split(',');
        if (stepTokens.Length != scaleTokens.Length)
        {
            Utils.Error("STEPS policy must have the same number of steps and scales");
        }
        int[] steps = new int[stepTokens.Length];
        float[] scales = new float[stepTokens.Length];
        for (var i = 0; i < stepTokens.Length; ++i)
        {
            steps[i] = int.Parse(stepTokens[i]);
            scales[i] = float.Parse(scaleTokens[i]);
        }
        net.Scales = scales;
        net.Steps = steps;
        net.NumSteps = stepTokens.Length;
    }
    else if (net.Policy == LearningRatePolicy.Exp)
    {
        net.Gamma = OptionList.option_find_float(options, "gamma", 1);
    }
    else if (net.Policy == LearningRatePolicy.Sig)
    {
        net.Gamma = OptionList.option_find_float(options, "gamma", 1);
        net.Step = OptionList.option_find_int(options, "step", 1);
    }
    else if (net.Policy == LearningRatePolicy.Poly || net.Policy == LearningRatePolicy.Random)
    {
        net.Power = OptionList.option_find_float(options, "power", 1);
    }
    net.MaxBatches = OptionList.option_find_int(options, "max_batches", 0);
}