Example #1
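    // Forward pass of the network for a single input image; when training is
    // enabled, the loss is also backpropagated through every layer afterwards.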
    public double[] test(int[,] inputImage)
    {
        double[,] input = AdaptResolution(inputImage, originalResolution, inputResolution);
        int inputRes = inputResolution;                                                                                         // inputRes = 32

        /*Debug.Log(inputRes);
         * string inp = "";
         * for (int i = 0; i < inputRes; i++)
         * {
         *  for (int j = 0; j < inputRes; j++)
         *  {
         *      Debug.Log(i + " " + j + " " + input[i, j]);
         *      inp += input[i, j] + " ";
         *  }
         *  inp += "End\n";
         * }
         * Debug.Log(inp);*/

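        // First convolutional layer: 1 input channel -> 6 feature maps.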
        ConvolutionalLayer layer1 = new ConvolutionalLayer(input, inputRes, 1, 6, convFiltersResolution, FirstConvolutionLayerFilters, initialization);

        inputRes = inputRes - convFiltersResolution + 1;                                                                        // inputRes = 28

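        // Max-pool the 6 feature maps; subsampling halves the resolution.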
        MaxpoolLayer layer2 = new MaxpoolLayer(layer1.Convolution(), inputRes, 6, poolFieldResolution);

        inputRes /= 2;                                                                                                          // inputRes = 14

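        // Second convolutional layer: 6 -> 16 feature maps.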
        ConvolutionalLayer layer3 = new ConvolutionalLayer(layer2.Subsample(), inputRes, 6, 16, convFiltersResolution, SecondConvolutionLayerFilters, initialization);

        inputRes = inputRes - convFiltersResolution + 1;                                                                        // inputRes = 10

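        // Max-pool the 16 feature maps.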
        MaxpoolLayer layer4 = new MaxpoolLayer(layer3.Convolution(), inputRes, 16, poolFieldResolution);

        inputRes /= 2;                                                                                                          // inputRes = 5

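        // Flatten the pooled feature maps into the 72-element vector fed to the fully connected layers.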
        InputLayer layer5 = new InputLayer(layer4.Subsample(), inputRes, 72, InputLayerFilters, initialization);

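        // Fully connected layer: 72 inputs -> 50 hidden units.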
        FullyConnectedLayer layer6 = new FullyConnectedLayer(layer5.Flatten(), 72, 50, FullyConnectedLayerWeights, initialization);

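        // Output layer: 50 -> 6 output activations.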
        FullyConnectedLayer final = new FullyConnectedLayer(layer6.Calculate(), 50, 6, OutputLayerWeights, initialization);

        double[] output = final.Calculate();
        SetGoal(output);
        double[] loss    = LossFunction(output, goal);
        double[] results = GetResults(output);
        Debug.Log("Output: " + output[0] + ", " + output[1] + ", " + output[2]);
        Debug.Log("Loss: " + loss[0] + ", " + loss[1] + ", " + loss[2]);
        Debug.Log("Rezultat: " + results[0].ToString("#.000") + "%, " + results[1].ToString("#.000") + "%, " + results[2].ToString("#.000") + "%");

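        // Training: propagate the gradient of the loss back through every layer;
        // layers with trainable weights also take the learning rate.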
        if (training)
        {
            double[] dx = final.Backpropagation(loss, LearningRate);
            dx = layer6.Backpropagation(dx, LearningRate);
            double[,,] convdx = layer5.Backpropagation(dx, LearningRate);
            convdx            = layer4.Backpropagation(convdx);
            convdx            = layer3.Backpropagation(convdx, LearningRate);
            convdx            = layer2.Backpropagation(convdx);
            convdx            = layer1.Backpropagation(convdx, LearningRate);
        }
        return output;
    }
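
The inline inputRes comments trace how the spatial resolution shrinks through the network: a "valid" convolution removes convFiltersResolution - 1 pixels per dimension, and each max-pool halves the map. Below is a minimal, self-contained C# sketch of that arithmetic; it is not part of the project above, and the filter size of 5 is only implied by the 32 -> 28 step, since 32 - convFiltersResolution + 1 = 28.

using System;

static class ResolutionTrace
{
    // "Valid" convolution: the output map shrinks by filterRes - 1.
    static int Conv(int res, int filterRes) => res - filterRes + 1;

    // 2x2 subsampling halves the map (integer division, as in the snippet).
    static int Pool(int res) => res / 2;

    static void Main()
    {
        int res = 32;            // inputResolution
        res = Conv(res, 5);      // 28 after the first convolution
        res = Pool(res);         // 14 after the first max-pool
        res = Conv(res, 5);      // 10 after the second convolution
        res = Pool(res);         // 5 after the second max-pool
        Console.WriteLine(res);  // prints 5
    }
}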