        // Accumulate this map's error (delta) from the connected maps of the next layer:
        // delta = f'(u_l) (.) sum over next maps of ( next_error folded with transposed W ),
        // i.e. the usual W^T * delta_next * f'(u_l) back-propagation step.
        public void get_map_error_from_convolution()
        {
            this.error = new float[outputwidth, outputheight];
            if (conv_maps_next_layer.Count > 0)
            {
                float[,] summfold = new float[outputwidth, outputheight];
                // 1) fold (convolve) each next-layer map's error with its transposed kernel
                //    and sum the partial results
                for (int k = 0; k < conv_maps_next_layer.Count; k++)
                {
                    ConvolutionFeatureMap cur_nl_fm = conv_maps_next_layer[k];
                    float[,] part_fold = ConvFuncs.fold_with_transponed_kernel(cur_nl_fm.error, cur_nl_fm.weights,
                                                                               cur_nl_fm.outputwidth, cur_nl_fm.outputheight, cur_nl_fm.w, cur_nl_fm.h);

                    // accumulate this partial fold into the total
                    for (int j = 0; j < outputheight; j++)
                    {
                        for (int i = 0; i < outputwidth; i++)
                        {
                            summfold[i, j] += part_fold[i, j];
                        }
                    }
                }

                // 2) multiply by the activation derivative at the pre-activation values
                //    and accumulate the bias correction from the error
                for (int j = 0; j < outputheight; j++)
                {
                    for (int i = 0; i < outputwidth; i++)
                    {
                        error[i, j] = ActFuncs.f_act_linear_deriv(non_activated_stage[i, j]) * summfold[i, j];
                        b          += error[i, j];
                    }
                }
            }
        }
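
        // --- Illustrative sketch, not the project's ConvFuncs code ---
        // The "fold with transposed kernel" used above is assumed to be a "full" 2-D
        // convolution of a map with the kernel rotated by 180 degrees, the usual
        // operation for pushing the error back through a convolutional layer.
        // The name and signature below are hypothetical; the real
        // ConvFuncs.fold_with_transponed_kernel may order its parameters differently.
        private static float[,] full_fold_with_flipped_kernel_sketch(float[,] map, float[,] kernel,
                                                                     int mapW, int mapH, int kernW, int kernH)
        {
            // "full" fold: the result is larger than the map by (kernel size - 1) in each direction
            int outW = mapW + kernW - 1;
            int outH = mapH + kernH - 1;
            float[,] result = new float[outW, outH];
            for (int oj = 0; oj < outH; oj++)
            {
                for (int oi = 0; oi < outW; oi++)
                {
                    float sum = 0f;
                    for (int kj = 0; kj < kernH; kj++)
                    {
                        for (int ki = 0; ki < kernW; ki++)
                        {
                            int mi = oi - ki; // map position touched by this kernel tap
                            int mj = oj - kj;
                            if (mi >= 0 && mi < mapW && mj >= 0 && mj < mapH)
                            {
                                // kernel rotated by 180 degrees: indexed back-to-front
                                sum += map[mi, mj] * kernel[kernW - 1 - ki, kernH - 1 - kj];
                            }
                        }
                    }
                    result[oi, oj] = sum;
                }
            }
            return result;
        }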
        // Correct the kernel and bias from the accumulated error:
        // the weight gradient is the fold of each input map with this map's error.
        public void correct_weights()
        {
            foreach (var input in inputs)
            {
                float[,] folderr = ConvFuncs.fold_with_transponed_kernel(input, error, w, h, outputwidth, outputheight);
                for (int j = 0; j < h; j++)
                {
                    for (int i = 0; i < w; i++)
                    {
                        // the kernel-sized gradient is assumed to come back in the same
                        // [width, height] layout as the weights themselves
                        weights[i, j] += folderr[i, j];
                    }
                }
            }
            // the bias correction is already accumulated in get_map_error_from_convolution(),
            // so the error is not added to b a second time here
        }
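
        // --- Hypothetical usage sketch (not part of the original class) ---
        // A backward pass would typically drive the two methods above like this:
        // first propagate the error from the last layer towards the input (using the
        // still-uncorrected weights of the following layer), and only then apply the
        // weight and bias corrections. The List<List<...>> layout of "convLayers" is an
        // assumption for illustration; the map(s) of the very last layer would receive
        // their error from elsewhere (e.g. the fully connected part of the network).
        public static void backward_pass_sketch(List<List<ConvolutionFeatureMap>> convLayers)
        {
            // 1) error propagation, from the layer closest to the output backwards
            for (int l = convLayers.Count - 1; l >= 0; l--)
            {
                foreach (ConvolutionFeatureMap map in convLayers[l])
                {
                    map.get_map_error_from_convolution();
                }
            }

            // 2) weight correction, once every map has its error
            foreach (List<ConvolutionFeatureMap> layer in convLayers)
            {
                foreach (ConvolutionFeatureMap map in layer)
                {
                    map.correct_weights();
                }
            }
        }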