// Backward pass of the convolution for the depth slices [depth, depth + length).
// This layer is the destination side of the connection, so the shared weight layers
// are keyed (to.Id, Id, d), mirroring the id scheme used by create_convolution.
private void backward_convolute_on_depth(UnitLayer to, int stride, int depth, int length)
{
    int to_depth = depth + length;
    if (to_depth >= Depth)
    {
        to_depth = Depth;
    }
    for (int d = depth; d < to_depth; d++)
    {
        WeightLayer weights = WeightLayerPool.find(to.Id, Id, d);
        int width_of_weights = weights.Width, height_of_weights = weights.Height;
        for (int y = 0; y < Height; y++)
        {
            int top = y * stride;
            int bottom = top + height_of_weights;
            for (int x = 0; x < Width; x++)
            {
                int left = x * stride;
                // Push this unit's input-port gradient back through its receptive field in "to".
                to.diff_convolute_product(left, top, left + width_of_weights, bottom,
                                          Units[x, y, d].gradient_at_inport, weights);
            }
        }
        // Update the weight layer from the gradients collected for this depth slice.
        weights.diff_weights();
    }
}
// Forward pass of the convolution for the depth slices [depth, depth + length):
// each destination unit (x, y, d) of "to" reads a stride-offset window of this layer's units.
private void forward_convolute_on_depth(UnitLayer to, int stride, int depth, int length)
{
    int to_depth = depth + length;
    if (to_depth >= to.Depth)
    {
        to_depth = to.Depth;
    }
    for (int d = depth; d < to_depth; d++)
    {
        WeightLayer weights = WeightLayerPool.find(Id, to.Id, d);
        int width_of_weights = weights.Width, height_of_weights = weights.Height;
        int width_of_units = to.Width, height_of_units = to.Height;
        for (int y = 0; y < height_of_units; y++)
        {
            int top = y * stride;
            int bottom = top + height_of_weights;
            for (int x = 0; x < width_of_units; x++)
            {
                int left = x * stride;
                // Convolve the source window [left, left + kernel width) x [top, bottom)
                // and write the result to the destination unit's input port.
                to.set_value_at_inport(x, y, d,
                    weights.convolute_product(left, top, left + width_of_weights, bottom, Units));
            }
        }
    }
}
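// Illustration only (not part of the original code): the loops above map destination unit (x, y)
// to the source window starting at (x * stride, y * stride), with no padding, so the layer sizes
// have to follow the usual "valid" convolution rule. A minimal sketch of that rule; the helper
// name is hypothetical and exists purely to document the arithmetic.
private static int expected_destination_size(int source_size, int kernel_size, int stride)
{
    // E.g. source 28, kernel 5, stride 1 -> 24; source 28, kernel 4, stride 2 -> 13.
    return (source_size - kernel_size) / stride + 1;
}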
// Creates one weight layer (kernel of the given width and height, spanning the source layer's
// full depth) per destination depth slice. The id encodes the connection: "C/{from}/{to}/{depth}".
static public void create_convolution(UnitLayer from, UnitLayer to, int width, int height)
{
    for (int d = 0; d < to.Depth; d++)
    {
        string id = string.Format("C/{0}/{1}/{2}", from.Id, to.Id, d);
        WeightLayer layer = new WeightLayer(id, width, height, from.Depth);
        // Initialize the weights, passing the source layer's unit count (its fan-in).
        layer.fill_weights(from.Width * from.Height * from.Depth);
        WeightLayers.Add(id, layer);
    }
}
// Creates one weight layer per destination unit (x, y, d), each covering the entire source layer.
// The id encodes the connection: "F/{from}/{to}/{x}/{y}/{d}".
static public void create_fully_connection(UnitLayer from, UnitLayer to)
{
    for (int d = 0; d < to.Depth; d++)
    {
        for (int y = 0; y < to.Height; y++)
        {
            for (int x = 0; x < to.Width; x++)
            {
                string id = string.Format("F/{0}/{1}/{2}/{3}/{4}", from.Id, to.Id, x, y, d);
                WeightLayer layer = new WeightLayer(id, from.Width, from.Height, from.Depth);
                // Initialize the weights, passing the source layer's unit count (its fan-in).
                layer.fill_weights(from.Width * from.Height * from.Depth);
                WeightLayers.Add(id, layer);
            }
        }
    }
}
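// Illustration only (not part of the original code): a minimal sketch of how the two factories
// above might be used when wiring a network. It assumes the factories live on WeightLayerPool
// (as the find(...) calls elsewhere in this file suggest) and that "input", "conv" and "output"
// are UnitLayer instances constructed elsewhere; the method itself is hypothetical.
private static void example_wiring(UnitLayer input, UnitLayer conv, UnitLayer output)
{
    // One 5x5 kernel per depth slice of "conv", each spanning the full depth of "input".
    WeightLayerPool.create_convolution(input, conv, 5, 5);
    // One weight layer per unit of "output", each spanning all of "conv".
    WeightLayerPool.create_fully_connection(conv, output);
}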
// Restores every weight layer from the binary stream: the layer count, then for each layer
// its id and dimensions, followed by the weight values read by the layer itself.
static public void load(BinaryReader reader)
{
    int count = reader.ReadInt32();
    for (int i = 0; i < count; i++)
    {
        string id = reader.ReadString();
        int depth = reader.ReadInt32();
        int width = reader.ReadInt32();
        int height = reader.ReadInt32();
        WeightLayer layer = new WeightLayer(id, width, height, depth);
        WeightLayers.Add(id, layer);
        layer.load(reader);
    }
}
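// Illustration only (not part of the original code): a minimal sketch of the writer that would
// match the format read by load(...) above. It assumes WeightLayer exposes Id, Width, Height and
// Depth, and a save(BinaryWriter) counterpart to layer.load(reader); all of those members are
// assumptions, and the field order simply mirrors what load(...) reads.
static public void save(BinaryWriter writer)
{
    writer.Write(WeightLayers.Count);
    foreach (WeightLayer layer in WeightLayers.Values)
    {
        writer.Write(layer.Id);
        writer.Write(layer.Depth);
        writer.Write(layer.Width);
        writer.Write(layer.Height);
        layer.save(writer); // assumed counterpart of layer.load(reader)
    }
}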
// Backward pass of the fully connected link for the depth slices [depth, depth + length):
// every unit (x, y, d) of this layer has its own weight layer over the upstream layer "to".
private void backward_fully_connect_on_depth(UnitLayer to, int depth, int length)
{
    int to_depth = depth + length;
    if (to_depth >= Depth)
    {
        to_depth = Depth;
    }
    for (int d = depth; d < to_depth; d++)
    {
        for (int y = 0; y < Height; y++)
        {
            for (int x = 0; x < Width; x++)
            {
                WeightLayer weights = WeightLayerPool.find(to.Id, Id, x, y, d);
                // Push this unit's input-port gradient back through its weight layer.
                to.diff_fully_product(Units[x, y, d].gradient_at_inport, weights);
                weights.diff_weights();
            }
        }
    }
}
// Forward pass of the fully connected link for the depth slices [depth, depth + length):
// each destination unit (x, y, d) is the product of its own weight layer with this layer's units.
private void forward_fully_connect_on_depth(UnitLayer to, int depth, int length)
{
    int width = to.Width, height = to.Height;
    int to_depth = depth + length;
    if (to_depth >= to.Depth)
    {
        to_depth = to.Depth;
    }
    for (int d = depth; d < to_depth; d++)
    {
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                WeightLayer weights = WeightLayerPool.find(Id, to.Id, x, y, d);
                to.set_value_at_inport(x, y, d, weights.fully_product(Units));
            }
        }
    }
}
// Hands a downstream unit's gradient to the weight layer together with this layer's units in the
// window [left, right) x [top, bottom), so the weight layer can form its gradient products.
public void diff_convolute_product(int left, int top, int right, int bottom, double gradient, WeightLayer weights)
{
    weights.diff_convolute_product(left, top, right, bottom, gradient, Units);
}
// Hands a downstream unit's gradient to the weight layer together with all of this layer's units,
// so the weight layer can form its gradient products.
public void diff_fully_product(double gradient, WeightLayer weights)
{
    weights.diff_fully_product(gradient, Units);
}