// Convolutional layer: applies out_depth trainable filters of size sx * sy * in_depth
// to the input volume, producing an out_sx * out_sy * out_depth output volume.
public ConvLayer(Layer_def opt)
{
    // required
    out_depth = opt.filters;
    sx = opt.sx; // filter size; odd sizes keep the filter centered and are cleaner
    in_depth = opt.in_depth;
    in_sx = opt.in_sx;
    in_sy = opt.in_sy;

    // optional — 0 means "unset", so fall back to the documented defaults
    sy = opt.sy != 0 ? opt.sy : sx;
    stride = opt.stride != 0 ? opt.stride : 1; // step at which filters are applied to the input
    pad = opt.pad != 0 ? opt.pad : 0;          // zero padding added around the input borders
    l1_decay_mul = opt.l1_decay_mul != 0 ? opt.l1_decay_mul : 0.0;
    l2_decay_mul = opt.l2_decay_mul != 0 ? opt.l2_decay_mul : 1.0;

    // computed output size. Floor is deliberate: if the strided convolution does not
    // fit the input exactly, the incomplete final application is trimmed off.
    out_sx = Convert.ToInt32(Math.Floor((double)(in_sx + pad * 2 - sx) / stride + 1));
    out_sy = Convert.ToInt32(Math.Floor((double)(in_sy + pad * 2 - sy) / stride + 1));
    layer_type = "conv";

    // initialization: one filter volume per output channel, one shared bias per channel
    var bias = opt.bias_pref != 0 ? opt.bias_pref : 0.0;
    filters = new List<Vol>();
    for (var f = 0; f < out_depth; f++)
    {
        filters.Add(new Vol(sx, sy, in_depth));
    }
    biases = new Vol(1, 1, out_depth, bias);
}
// Fully-connected layer: every output neuron is connected to every input value.
public FullyConnLayer(Layer_def opt)
{
    // required; 'filters' is accepted as a synonym for 'num_neurons'
    out_depth = opt.num_neurons != 0 ? opt.num_neurons : opt.filters;

    // optional regularization multipliers (0 means "unset")
    l1_decay_mul = opt.l1_decay_mul != 0 ? opt.l1_decay_mul : 0.0;
    l2_decay_mul = opt.l2_decay_mul != 0 ? opt.l2_decay_mul : 1.0;

    // computed: the input volume is flattened into a single vector
    num_inputs = opt.in_sx * opt.in_sy * opt.in_depth;
    out_sx = 1;
    out_sy = 1;
    layer_type = "fc";

    // initialization: one weight vector per neuron plus one bias each.
    // NOTE(review): default bias here is 1.0 while ConvLayer uses 0.0 — confirm intended.
    var bias = opt.bias_pref != 0 ? opt.bias_pref : 1.0;
    filters = new List<Vol>();
    for (var n = 0; n < out_depth; n++)
    {
        filters.Add(new Vol(1, 1, num_inputs));
    }
    biases = new Vol(1, 1, out_depth, bias);
}
// ReLU activation layer: element-wise max(0, x); output shape equals input shape.
public ReluLayer(Layer_def opt)
{
    out_sx = opt.in_sx;
    out_sy = opt.in_sy;
    out_depth = opt.in_depth;
    layer_type = "relu";
}
// Tanh activation layer: element-wise hyperbolic tangent; output shape equals input shape.
public TanhLayer(Layer_def opt)
{
    out_sx = opt.in_sx;
    out_sy = opt.in_sy;
    out_depth = opt.in_depth;
    layer_type = "tanh";
}
// Sigmoid activation layer: element-wise logistic function; output shape equals input shape.
public SigmoidLayer(Layer_def opt)
{
    // computed — pass-through dimensions
    out_sx = opt.in_sx;
    out_sy = opt.in_sy;
    out_depth = opt.in_depth;
    layer_type = "sigmoid";
}
// SVM loss layer: one score per class, computed over the flattened input.
public SVMLayer(Layer_def opt)
{
    // computed — flatten the input volume into a score vector
    num_inputs = opt.in_sx * opt.in_sy * opt.in_depth;
    out_depth = num_inputs;
    out_sx = 1;
    out_sy = 1;
    layer_type = "svm";
}
// Regression loss layer: predicts one continuous value per flattened input unit.
public RegressionLayer(Layer_def opt)
{
    // computed — flatten the input volume
    num_inputs = opt.in_sx * opt.in_sy * opt.in_depth;
    out_depth = num_inputs;
    out_sx = 1;
    out_sy = 1;
    layer_type = "regression";
}
// Softmax loss layer: normalizes the flattened input into a class probability distribution.
public SoftmaxLayer(Layer_def opt)
{
    // computed — flatten the input volume; one probability per class
    num_inputs = opt.in_sx * opt.in_sy * opt.in_depth;
    out_depth = num_inputs;
    out_sx = 1;
    out_sy = 1;
    layer_type = "softmax";

    // scratch buffer for the exponentials computed during the forward pass
    es = Convnet_util.zeros(out_depth);
}
// Dropout layer: during training, randomly zeroes activations with probability drop_prob.
// Output shape equals input shape.
public DropoutLayer(Layer_def opt)
{
    // computed — pass-through dimensions
    this.out_sx = opt.in_sx;
    this.out_sy = opt.in_sy;
    this.out_depth = opt.in_depth;
    this.layer_type = "dropout";

    // FIX: previously hard-coded to 0.5, silently ignoring opt.drop_prob — the
    // '{ type: dropout, drop_prob: X }' defs inserted by makeLayers had no effect.
    // Honor the caller's value when provided (makeLayers checks drop_prob != null,
    // so the field is nullable); keep 0.5 as the default.
    this.drop_prob = opt.drop_prob != null ? (double)opt.drop_prob : 0.5;

    // per-unit flags recording which activations were dropped in the last forward pass
    this.dropped = Convnet_util.zeros_bool(this.out_sx * this.out_sy * this.out_depth);
    rnd = new Random();
}
// Maxout activation layer: takes the max over consecutive groups of group_size channels,
// shrinking depth by that factor.
public MaxoutLayer(Layer_def opt)
{
    group_size = opt.group_size != 0 ? opt.group_size : 2;

    // computed — spatial size unchanged, depth divided by group size (floored)
    out_sx = opt.in_sx;
    out_sy = opt.in_sy;
    out_depth = Convert.ToInt32(Math.Floor((double)opt.in_depth / group_size));
    layer_type = "maxout";

    // records which member of each group won, for routing gradients in backprop
    switches = new int[out_depth];
}
// Input layer: declares the dimensions of the data entering the network.
public InputLayer(Layer_def opt)
{
    // required: depth
    out_depth = opt.out_depth;

    // optional: spatial dimensions default to 1 (e.g. plain feature vectors)
    out_sx = opt.out_sx != 0 ? opt.out_sx : 1;
    out_sy = opt.out_sy != 0 ? opt.out_sy : 1;

    layer_type = "input";
}
// Local Response Normalization layer: normalizes each activation by the activity
// of its n neighboring channels, with parameters k, alpha and beta.
public LocalResponseNormalizationLayer(Layer_def opt)
{
    // required
    k = opt.k;
    n = opt.n;
    alpha = opt.alpha;
    beta = opt.beta;

    // computed — pass-through dimensions
    out_sx = opt.in_sx;
    out_sy = opt.in_sy;
    out_depth = opt.in_depth;
    layer_type = "lrn";

    // sanity check: the neighborhood should be centered, which requires odd n
    if (n % 2 == 0)
    {
        Console.WriteLine("WARNING n should be odd for LRN layer");
    }
}
// Max-pooling layer: slides an sx * sy window over each channel and keeps the maximum.
public PoolLayer(Layer_def opt)
{
    // required
    sx = opt.sx; // pooling window size
    in_depth = opt.in_depth;
    in_sx = opt.in_sx;
    in_sy = opt.in_sy;

    // optional — 0 means "unset"
    sy = opt.sy != 0 ? opt.sy : sx;
    stride = opt.stride != 0 ? opt.stride : 2;
    pad = opt.pad != 0 ? opt.pad : 0; // zero padding around the input borders

    // computed: depth is preserved; floor trims any partial final window
    out_depth = in_depth;
    out_sx = Convert.ToInt32(Math.Floor((double)(in_sx + pad * 2 - sx) / stride + 1));
    out_sy = Convert.ToInt32(Math.Floor((double)(in_sy + pad * 2 - sy) / stride + 1));
    layer_type = "pool";

    // x,y coordinates of each output's max source, needed to route gradients in backprop
    switchx = Convnet_util.zeros_int(out_sx * out_sy * out_depth);
    switchy = Convnet_util.zeros_int(out_sx * out_sy * out_depth);
}
// Builds this.layers from a list of JSON layer definitions. First "desugars" the
// defs — inserting the implied fc layer before loss layers, activation layers
// after fc/conv, and dropout layers where drop_prob is requested — then
// instantiates each layer, wiring input dimensions from the previous layer.
public void makeLayers(List<String> defs)
{
    // few checks
    //assert(defs.length >= 2, 'Error! At least one input layer and one loss layer are required.');
    //assert(defs[0].type == 'input', 'Error! First layer must be the input layer, to declare size of inputs');

    // desugar layer_defs for adding activation, dropout layers etc
    List<String> new_defs = new List<String>();
    for (var i = 0; i < defs.Count; i++)
    {
        Layer_def def = JsonConvert.DeserializeObject<Layer_def>(defs[i]);
        if (def.type == "softmax" || def.type == "svm")
        {
            // add an fc layer here, there is no reason the user should
            // have to worry about this and we almost always want to
            new_defs.Add("{ type: 'fc', num_neurons:" + def.num_classes + "}");
        }
        else if (def.type == "regression") // FIX: was "regression " (trailing space), so this branch never matched
        {
            // add an fc layer here, there is no reason the user should
            // have to worry about this and we almost always want to
            new_defs.Add("{ type: 'fc', num_neurons:" + def.num_neurons + "}");
        }
        else if (def.type == "fc" || def.type == "conv")
        {
            def.bias_pref = 0.0;
            if (def.activation != null && def.activation == "relu")
            {
                // relus like a bit of positive bias to get gradients early;
                // otherwise it's technically possible that a relu unit will never
                // turn on (by chance), never get any gradient and never contribute
                // any computation. Dead relu.
                def.bias_pref = 0.1;
            }
        }

        if (def.type == "fc" || def.type == "conv")
        {
            // FIX: the bias_pref mutation above was previously discarded because the
            // ORIGINAL string defs[i] was re-added; serialize the mutated def instead
            // so the relu bias preference actually reaches the layer constructor.
            new_defs.Add(JsonConvert.SerializeObject(def));
        }
        else
        {
            new_defs.Add(defs[i]);
        }

        if (def.activation != null)
        {
            if (def.activation == "relu")
            {
                new_defs.Add("{ type: 'relu'}");
            }
            else if (def.activation == "sigmoid")
            {
                new_defs.Add("{ type: 'sigmoid'}");
            }
            else if (def.activation == "tanh")
            {
                new_defs.Add("{ type: 'tanh'}");
            }
            else if (def.activation == "maxout")
            {
                // create maxout activation, and pass along group size, if provided
                var gs = def.group_size != 0 ? def.group_size : 2;
                new_defs.Add("{ type: 'maxout', group_size: " + gs + "}");
            }
            else
            {
                Console.WriteLine("ERROR unsupported activation " + def.activation);
            }
        }
        if (def.drop_prob != null && def.type != "dropout")
        {
            new_defs.Add("{ type: 'dropout', drop_prob: " + def.drop_prob + "}");
        }
    }
    defs = new_defs;

    // create the layers; each layer's input size comes from the previous layer's output
    for (var i = 0; i < defs.Count; i++)
    {
        Layer_def def = JsonConvert.DeserializeObject<Layer_def>(defs[i]);
        if (i > 0)
        {
            var prev = this.layers[i - 1];
            def.in_sx = prev.out_sx;
            def.in_sy = prev.out_sy;
            def.in_depth = prev.out_depth;
        }
        switch (def.type)
        {
            case "fc": this.layers.Add(new FullyConnLayer(def)); break;
            case "lrn": this.layers.Add(new LocalResponseNormalizationLayer(def)); break;
            case "dropout": this.layers.Add(new DropoutLayer(def)); break;
            case "input": this.layers.Add(new InputLayer(def)); break;
            case "softmax": this.layers.Add(new SoftmaxLayer(def)); break;
            case "regression": this.layers.Add(new RegressionLayer(def)); break;
            case "conv": this.layers.Add(new ConvLayer(def)); break;
            case "pool": this.layers.Add(new PoolLayer(def)); break;
            case "relu": this.layers.Add(new ReluLayer(def)); break;
            case "sigmoid": this.layers.Add(new SigmoidLayer(def)); break;
            case "tanh": this.layers.Add(new TanhLayer(def)); break;
            case "maxout": this.layers.Add(new MaxoutLayer(def)); break;
            case "svm": this.layers.Add(new SVMLayer(def)); break;
            default: Console.WriteLine("ERROR: UNRECOGNIZED LAYER TYPE: " + def.type); break;
        }
    }
}