Example #1
        public FCSuperResolution()
        {
            // Encoder endpoints: StartSide x StartSide, 3-channel input in,
            // LeakyReLU activation at the 4x4 bottleneck out
            superres_enc_front = InputLayer.Create(StartSide, 3);
            superres_enc_back  = ActivationLayer.Create <LeakyReLU>();

            // Compress through FC layers: 16x16 -> 8x8 -> 4x4
            superres_enc_front.Append(
                FCLayer.Create(16, 16).Append(
                    ActivationLayer.Create <LeakyReLU>().Append(
                        FCLayer.Create(8, 8).Append(
                            ActivationLayer.Create <LeakyReLU>().Append(
                                FCLayer.Create(4, 4).Append(
                                    superres_enc_back
                                    ))))));

            // Decoder endpoints: 4x4 code in, Tanh output out
            superres_dec_front = InputLayer.Create(4, 4);
            superres_dec_back  = ActivationLayer.Create <Tanh>();

            // Expand back up: 8x8 -> 16x8 -> 32x3 image output
            superres_dec_front.Append(
                FCLayer.Create(8, 8).Append(
                    ActivationLayer.Create <LeakyReLU>().Append(
                        FCLayer.Create(16, 8).Append(
                            ActivationLayer.Create <LeakyReLU>().Append(
                                FCLayer.Create(32, 3).Append(
                                    superres_dec_back
                                    ))))));

            // Chain the encoder's bottleneck into the decoder's input
            superres_enc_back.Append(superres_dec_front);

            //Initialize Weights
            superres_enc_front.SetupInternalState();
            superres_enc_front.InitializeWeights(new UniformWeightInitializer(0, 0.001f));
        }
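A minimal training-step sketch for this chain, reusing the Matrix/Quadratic/SGD API that appears in Example #6 below (ForwardPropagate, LossDeriv, ComputeGradients, ComputeLayerErrors, UpdateLayers); the TrainStep name, its arguments, the learning rate, and the output-sized loss buffer are illustrative assumptions, not part of the original class:

        // Hypothetical single training step (sketch): forward through
        // encoder + decoder, then backpropagate a quadratic loss.
        public void TrainStep(Matrix lowResInput, Matrix highResTarget)
        {
            var lossFunc  = new Quadratic();
            var optimizer = new SGD(0.1f);   // placeholder learning rate

            // Forward pass runs the whole encoder -> decoder chain
            var output = superres_enc_front.ForwardPropagate(lowResInput);

            // Loss derivative w.r.t. the decoder output (assumes Matrix exposes Rows)
            var loss_vec = new Matrix(output[0].Rows, 1, MemoryFlags.ReadWrite, true);
            lossFunc.LossDeriv(output[0], highResTarget, loss_vec, 0);

            // Backpropagate from the final Tanh layer and apply the update
            superres_dec_back.ComputeGradients(loss_vec);
            superres_dec_back.ComputeLayerErrors(loss_vec);
            superres_dec_back.UpdateLayers(optimizer);
        }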
Example #2
        public DGAN()
        {
            // Discriminator: StartSide x StartSide, 1-channel input -> 256 hidden units -> single Sigmoid score
            discriminator      = InputLayer.Create(StartSide, 1);
            discriminator_back = ActivationLayer.Create <Sigmoid>();
            discriminator.Append(
                FCLayer.Create(1, 256).Append(
                    ActivationLayer.Create <LeakyReLU>().Append(
                        FCLayer.Create(1, 1).Append(
                            discriminator_back
                            ))));

            // Generator: LatentSize noise -> 256 -> 512 hidden units (dropout between) -> OutputSize Tanh output
            generator      = InputLayer.Create(1, LatentSize);
            generator_back = ActivationLayer.Create <Tanh>();
            generator.Append(
                FCLayer.Create(1, 256).Append(
                    ActivationLayer.Create <LeakyReLU>().Append(
                        DropoutLayer.Create(0.5f).Append(
                            FCLayer.Create(1, 512).Append(
                                ActivationLayer.Create <LeakyReLU>().Append(
                                    DropoutLayer.Create(0.5f).Append(
                                        FCLayer.Create(1, OutputSize).Append(
                                            generator_back
                                            ))))))));

            //Initialize Weights
            discriminator.SetupInternalState();
            discriminator.InitializeWeights(new UniformWeightInitializer(3, 0));

            generator.SetupInternalState();
            generator.InitializeWeights(new UniformWeightInitializer(1, 0));
        }
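As a usage note, a hedged sketch of scoring one generated sample, patterned on the ForwardPropagate/Matrix calls in Example #6; ScoreFakeSample is a hypothetical helper, and it assumes OutputSize matches the discriminator's StartSide x StartSide input size:

        // Hypothetical helper (sketch): latent noise -> generator -> discriminator score.
        public float ScoreFakeSample(System.Random rng)
        {
            // Fill a latent vector with uniform noise in [-1, 1]
            var vals = new float[LatentSize];
            for (int i = 0; i < vals.Length; i++)
                vals[i] = (float)(rng.NextDouble() * 2 - 1);

            var noise = new Matrix(LatentSize, 1, MemoryFlags.ReadWrite, false);
            noise.Write(vals);

            var fake  = generator.ForwardPropagate(noise);         // Tanh output in [-1, 1]
            var score = discriminator.ForwardPropagate(fake[0]);   // Sigmoid score in (0, 1)
            return score[0].Memory[0];
        }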
Example #3
    private void Awake()
    {
        _rng = new Rng(1234);

        DataManager.LoadFloatData();

        // Create convolution layers

        _layers = new List <ConvLayer2D>();
        var l1 = ConvLayer2D.Create(DataManager.Width, DataManager.Channels, 5, 1, 0, 4).Value;
        _layers.Add(l1);
        var l2 = ConvLayer2D.Create(l1.OutWidth, l1.NumFilters, 3, 3, 0, 4).Value;
        _layers.Add(l2);
        var l3 = ConvLayer2D.Create(l2.OutWidth, l2.NumFilters, 3, 1, 0, 4).Value;
        _layers.Add(l3);

        var last         = l3;
        int convOutCount = last.OutWidth * last.OutWidth * last.NumFilters;

        Debug.Log("Conv out neuron count: " + convOutCount);

        _fcLayer = new FCLayer(10, convOutCount);

        // Parameter initialization

        for (int i = 0; i < _layers.Count; i++)
        {
            NeuralMath.RandomGaussian(ref _rng, _layers[i].Kernel, 0f, 0.25f);
            NeuralMath.RandomGaussian(ref _rng, _layers[i].Bias, 0f, 0.1f);
        }

        NeuralMath.RandomGaussian(ref _rng, _fcLayer.Biases, 0f, 0.1f);
        NeuralMath.RandomGaussian(ref _rng, _fcLayer.Weights, 0f, 0.1f);

        // Create debug textures

        _layerTex = new List <Conv2DLayerTexture>(_layers.Count);
        for (int i = 0; i < _layers.Count; i++)
        {
            _layerTex.Add(new Conv2DLayerTexture(_layers[i]));
        }

        // Create the training structure

        _batch         = new NativeArray <int>(BatchSize, Allocator.Persistent, NativeArrayOptions.ClearMemory);
        _targetOutputs = new NativeArray <float>(OutputClassCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        _dCdO          = new NativeArray <float>(OutputClassCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
        _input         = new NativeArray <float>(DataManager.ImgDims * DataManager.Channels, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
    }
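The four NativeArrays above are allocated with Allocator.Persistent, so Unity will report them as leaked unless they are disposed explicitly; a likely counterpart (assumed here, not shown in the original) is:

    private void OnDestroy()
    {
        // Persistent NativeArray allocations must be released manually
        _batch.Dispose();
        _targetOutputs.Dispose();
        _dCdO.Dispose();
        _input.Dispose();
    }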
Example #4
    /// <summary>
    /// Instantiates a Layer from LayerData and sets initial values.
    /// </summary>
    /// <param name="l">Layer description loaded from the model data.</param>
    /// <param name="input">The previously instantiated layer object, wired in as this layer's input.</param>
    /// <param name="isOutput">Whether this is the output layer (currently unused in the body).</param>
    /// <returns>The instantiated layer GameObject, or null for an unknown layer type.</returns>
    GameObject InstantiateLayer(LayerData l, GameObject input, bool isOutput)
    {
        switch (l.type)
        {
        case LayerType.IMAGE:
        {
            GameObject go      = GetImageLayerPrefab();
            GameObject inst    = Instantiate(go);
            ImageLayer imLayer = inst.GetComponent <ImageLayer>();
            imLayer.reducedResolution = new Vector2Int(l.activationShape[1], l.activationShape[2]);
            imLayer.fullResolution    = new Vector2Int(l.activationShape[1], l.activationShape[2]);
            imLayer.pixelSpacing      = 0.025f;
            imLayer.depth             = l.activationShape[3];

            GameObject canvas        = GameObject.FindGameObjectWithTag("Canvas");
            Material   pixelMaterial = canvas.GetComponent <GuiManager>().pixelMaterial;

            imLayer.rgb = false;
            MeshRenderer meshRenderer = inst.GetComponent <MeshRenderer>();
            // sharedMaterials returns a copy of the array, so writing into an
            // element of it is lost; assign through sharedMaterial instead.
            meshRenderer.sharedMaterial = pixelMaterial;

            imLayer.SetActivationTensorShape(l.activationShape);
            for (int i = 0; i < l.activationTensors.Count; i++)
            {
                imLayer.SetActivationTensorForEpoch(l.activationTensors[i], i);
            }

            return(inst);
        }

        case LayerType.CONV:
        {
            GameObject go        = GetConvLayerPrefab();
            GameObject inst      = Instantiate(go);
            ConvLayer  convLayer = inst.GetComponent <ConvLayer>();
            convLayer.convShape      = new Vector2Int(l.weightShape[0], l.weightShape[1]);
            convLayer.reducedDepth   = l.weightShape[3];
            convLayer.fullDepth      = l.weightShape[3];
            convLayer.input          = input;
            convLayer.filterSpread   = 1.0f;
            convLayer.lineCircleGrid = 2.0f;
            convLayer.filterSpacing  = 0.025f;

            convLayer.SetWeightTensorShape(l.weightShape);
            for (int i = 0; i < l.weightTensors.Count; i++)
            {
                convLayer.SetWeightTensorForEpoch(l.weightTensors[i], i);
            }

            convLayer.SetActivationTensorShape(l.activationShape);
            for (int i = 0; i < l.activationTensors.Count; i++)
            {
                convLayer.SetActivationTensorForEpoch(l.activationTensors[i], i);
            }
            return(inst);
        }

        case LayerType.MAXPOOL:
        {
            GameObject   go      = GetMaxPoolLayerPrefab();
            GameObject   inst    = Instantiate(go);
            MaxPoolLayer mpLayer = inst.GetComponent <MaxPoolLayer>();
            mpLayer.filterSpacing = 0.025f;
            mpLayer.zOffset       = 0.25f;
            mpLayer.input         = input;

            mpLayer.SetActivationTensorShape(l.activationShape);
            for (int i = 0; i < l.activationTensors.Count; i++)
            {
                mpLayer.SetActivationTensorForEpoch(l.activationTensors[i], i);
            }
            return(inst);
        }

        case LayerType.FC:
        {
            GameObject go      = GetFCLayerPrefab();
            GameObject inst    = Instantiate(go);
            FCLayer    fcLayer = inst.GetComponent <FCLayer>();
            fcLayer.input         = input;
            fcLayer.filterSpacing = 0.025f;
            fcLayer.reducedDepth  = l.weightShape[1];
            fcLayer.fullDepth     = l.weightShape[1];

            //TODO: here loading is reducing non-output FC layers automatically by 4
            //(note: as written this branch just re-assigns the same reducedDepth)
            if (!l.name.Contains("out"))
            {
                fcLayer.reducedDepth = l.weightShape[1];
            }
            else
            {
                fcLayer.lineCircleGrid = 0;
            }
            if (l.name.Contains("0"))
            {
                fcLayer.collapseInput = 1f;
            }
            //fcLayer.edgeBundle = 1.0f;
            fcLayer.SetTensorShape(l.weightShape);
            for (int i = 0; i < l.weightTensors.Count; i++)
            {
                fcLayer.SetTensorForEpoch(l.weightTensors[i], i);
            }

            fcLayer.SetActivationTensorShape(l.activationShape);
            for (int i = 0; i < l.activationTensors.Count; i++)
            {
                fcLayer.SetActivationTensorForEpoch(l.activationTensors[i], i);
            }
            return(inst);
        }

        default:
            break;
        }
        return(null);
    }
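A sketch of how InstantiateLayer is presumably driven, wiring each instantiated layer in as the next layer's input; the BuildNetwork name and its loop are assumptions for illustration:

    // Hypothetical caller (sketch): instantiate layers in order,
    // feeding each instance to the next as its input GameObject.
    GameObject BuildNetwork(List <LayerData> layers)
    {
        GameObject previous = null;
        for (int i = 0; i < layers.Count; i++)
        {
            bool isOutput = (i == layers.Count - 1);
            GameObject inst = InstantiateLayer(layers[i], previous, isOutput);
            if (inst != null)
            {
                previous = inst;
            }
        }
        return(previous);   // the last instantiated layer
    }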
Example #5
        public GAN()
        {
            #region 128x128

            /*discriminator = InputLayer.Create(StartSide, 3);
             * discriminator_back = ActivationLayer.Create<Sigmoid>();
             * discriminator.Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 126
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 124
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 122
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 120
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  pooling_1.Append(                           //o = 60
             *  ConvLayer.Create(3, 8).Append(              //o = 58
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 56
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 54
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 52
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  pooling_2.Append(                           //o = 26
             *  ConvLayer.Create(3, 8).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 22
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  pooling_3.Append(                           //o = 11
             *  ConvLayer.Create(3, 8).Append(              //o = 9
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  FCLayer.Create(1, 1).Append(
             *      discriminator_back
             * )))))))))))))))))))))))))));
             *
             * generator = InputLayer.Create(32, 8);
             * generator_back = ActivationLayer.Create<Tanh>();
             * generator.Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 3, 2).Append(              //o = 26
             *      generator_back
             * ))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));*/
            #endregion

            /*
             * discriminator = InputLayer.Create(StartSide, 3);
             * discriminator_back = ActivationLayer.Create<Sigmoid>();
             * discriminator.Append(
             *  ConvLayer.Create(3, 8).Append(              //o = 30
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 512).Append(              //o = 28
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 26
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 128).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 64).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 32).Append(              //o = 24
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.2f).Append(
             *  FCLayer.Create(1, 1).Append(
             *      discriminator_back
             * )))))))))))))))))))))));
             *
             * generator = InputLayer.Create(1, LatentSize);
             * generator_back = ActivationLayer.Create<Tanh>();
             * generator.Append(
             *  FCLayer.Create(1, 32).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 64).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 128).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 256).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(1, 512).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(16, 2).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  ConvLayer.Create(3, 8, 2).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *  DropoutLayer.Create(0.3f).Append(
             *  FCLayer.Create(EndSide, 3).Append(              //o = 18
             *  ActivationLayer.Create<LeakyReLU>().Append(
             *      generator_back
             * ))))))))))))))))))))))))));*/


            // Discriminator: flattened 3-channel input through a 1024 -> 512 -> 256 -> 256 -> 64 -> 64 FC stack
            discriminator      = InputLayer.Create(StartSide, 3);
            discriminator_back = ActivationLayer.Create <Sigmoid>();
            discriminator.Append(
                FCLayer.Create(1, 1024).Append(
                    ActivationLayer.Create <LeakyReLU>().Append(
                        DropoutLayer.Create(0.3f).Append(
                            FCLayer.Create(1, 512).Append(
                                ActivationLayer.Create <LeakyReLU>().Append(
                                    DropoutLayer.Create(0.3f).Append(
                                        FCLayer.Create(1, 256).Append(
                                            ActivationLayer.Create <LeakyReLU>().Append(
                                                DropoutLayer.Create(0.3f).Append(
                                                    FCLayer.Create(1, 256).Append(
                                                        ActivationLayer.Create <LeakyReLU>().Append(
                                                            DropoutLayer.Create(0.3f).Append(
                                                                FCLayer.Create(1, 64).Append(
                                                                    ActivationLayer.Create <LeakyReLU>().Append(
                                                                        DropoutLayer.Create(0.3f).Append(
                                                                            FCLayer.Create(1, 64).Append(
                                                                                ActivationLayer.Create <LeakyReLU>().Append(
                                                                                    DropoutLayer.Create(0.3f).Append(
                                                                                        FCLayer.Create(1, 1).Append(
                                                                                            discriminator_back
                                                                                            ))))))))))))))))))));

            // Generator: LatentSize noise up through 128 -> 256 -> 256 -> 256 -> 512 -> 1024 to a 32x32x3 Tanh output
            generator      = InputLayer.Create(1, LatentSize);
            generator_back = ActivationLayer.Create <Tanh>();
            generator.Append(
                FCLayer.Create(1, 128).Append(
                    ActivationLayer.Create <ReLU>().Append(
                        FCLayer.Create(1, 256).Append(
                            ActivationLayer.Create <ReLU>().Append(
                                FCLayer.Create(1, 256).Append(
                                    ActivationLayer.Create <ReLU>().Append(
                                        DropoutLayer.Create(0.5f).Append(
                                            FCLayer.Create(1, 256).Append(
                                                ActivationLayer.Create <ReLU>().Append(
                                                    FCLayer.Create(1, 512).Append(
                                                        ActivationLayer.Create <ReLU>().Append(
                                                            FCLayer.Create(1, 1024).Append(
                                                                ActivationLayer.Create <ReLU>().Append(
                                                                    FCLayer.Create(32, 3).Append(
                                                                        generator_back
                                                                        )))))))))))))));

            // Encoder: 32x32x3 image back down to a LatentSize code
            encoder      = InputLayer.Create(32, 3);
            encoder_back = ActivationLayer.Create <LeakyReLU>();
            encoder.Append(
                FCLayer.Create(1, 1024).Append(
                    ActivationLayer.Create <ReLU>().Append(
                        FCLayer.Create(1, 512).Append(
                            ActivationLayer.Create <ReLU>().Append(
                                FCLayer.Create(1, 256).Append(
                                    ActivationLayer.Create <ReLU>().Append(
                                        DropoutLayer.Create(0.5f).Append(
                                            FCLayer.Create(1, 256).Append(
                                                ActivationLayer.Create <ReLU>().Append(
                                                    FCLayer.Create(1, 256).Append(
                                                        ActivationLayer.Create <ReLU>().Append(
                                                            FCLayer.Create(1, 128).Append(
                                                                ActivationLayer.Create <ReLU>().Append(
                                                                    FCLayer.Create(1, LatentSize).Append(
                                                                        encoder_back
                                                                        )))))))))))))));

            //Initialize Weights
            discriminator.SetupInternalState();
            discriminator.InitializeWeights(new UniformWeightInitializer(3, 0));

            generator.SetupInternalState();
            generator.InitializeWeights(new UniformWeightInitializer(1, 0));

            encoder.SetupInternalState();
            encoder.InitializeWeights(new UniformWeightInitializer(2, 0));
        }
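Since the encoder maps a 32x32x3 image to a LatentSize code and the generator maps a code back to 32x32x3, an autoencoder-style round trip is possible; a hedged sketch, with Reconstruct as a hypothetical helper following the call pattern of Example #6:

        // Hypothetical round trip (sketch): image -> latent code -> regenerated image.
        public Matrix[] Reconstruct(Matrix realImage)
        {
            var code = encoder.ForwardPropagate(realImage);   // 32x32x3 -> LatentSize
            return generator.ForwardPropagate(code[0]);       // LatentSize -> 32x32x3
        }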
Example #6
        public void Check()
        {
            // Numerical gradient check: compare analytic weight gradients
            // against central finite differences for one conv and one FC layer
            front = InputLayer.Create(3, 1);
            back  = ActivationLayer.Create <Sigmoid>();
            conv  = ConvLayer.Create(2, 1);
            fc    = FCLayer.Create(1, 1);

            front.Append(
                conv.Append(
                    ConvLayer.Create(3, 3, 2).Append(
                        ActivationLayer.Create <LeakyReLU>().Append(
                            fc.Append(
                                FCLayer.Create(1, 1).Append(
                                    back
                                    ))))));

            front.SetupInternalState();
            front.InitializeWeights(new UniformWeightInitializer(0, 0)); // alternatively: new ConstantWeightInitializer()

            var lossFunc  = new Quadratic();
            var optimizer = new SGD(0.7f);

            // Four 3x3 single-channel test inputs, written row-major
            Matrix x0 = new Matrix(9, 1, MemoryFlags.ReadWrite, false);
            x0.Write(new float[] { 0, 0, 0, 0, 0, 0, 0, 0, 0 });

            Matrix x1 = new Matrix(9, 1, MemoryFlags.ReadWrite, false);
            x1.Write(new float[] { 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f });

            Matrix x2 = new Matrix(9, 1, MemoryFlags.ReadWrite, false);
            x2.Write(new float[] { 0, 0, 0, 1, 1, 1, 1, 1, 1 });

            Matrix x3 = new Matrix(9, 1, MemoryFlags.ReadWrite, false);
            x3.Write(new float[] { 1, 1, 1, 1, 1, 1, 1, 1, 1 });

            var loss_vec = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            var x        = new Matrix[] { x0, x1, x2, x3 };
            var y        = new Matrix[]
            {
                new Matrix(1, 1, MemoryFlags.ReadWrite, true),
                new Matrix(1, 1, MemoryFlags.ReadWrite, true),
                new Matrix(1, 1, MemoryFlags.ReadWrite, true),
                new Matrix(1, 1, MemoryFlags.ReadWrite, true),
            };

            y[0].Write(new float[] { 0.53f });
            y[1].Write(new float[] { 0.77f });
            y[2].Write(new float[] { 0.88f });
            y[3].Write(new float[] { 1.1f });

            float delta = 1e-1f;                            // finite-difference step size
            float orig_loss_deriv = 0;
            float norm_conv = 0.0f, norm_conv_net = 0.0f;   // accumulated error / comparison count
            float norm_fc = 0.0f, norm_fc_net = 0.0f;

            for (int epoch = 0; epoch < 1; epoch++)
            {
                for (int idx = 1; idx < x.Length - 2; idx++) // as written, only x[1]/y[1] are checked
                {
                    {
                        var output = front.ForwardPropagate(x[idx]);

                        //Compute loss deriv
                        loss_vec.Clear();
                        lossFunc.LossDeriv(output[0], y[idx], loss_vec, 0);
                        orig_loss_deriv = loss_vec.Memory[0];

                        back.ComputeGradients(loss_vec);
                        back.ComputeLayerErrors(loss_vec);
                    }

                    {
                        //Save weights and apply deltas
                        var conv_l = conv.CurrentLayer as ConvLayer;

                        for (int f_i = 0; f_i < conv_l.FilterCnt; f_i++)
                        {
                            for (int i_i = 0; i_i < conv_l.InputDepth; i_i++)
                            {
                                for (int f_y = 0; f_y < conv_l.FilterSz; f_y++)
                                {
                                    for (int f_x = 0; f_x < conv_l.FilterSz; f_x++)
                                    {
                                        var w_delta = conv_l.WeightErrors[f_i][i_i].Memory[f_y * conv_l.FilterSz + f_x];

                                        conv_l.Weights[f_i][i_i].Memory[f_y * conv_l.FilterSz + f_x] += delta;
                                        var output = front.ForwardPropagate(x[idx]);
                                        loss_vec.Clear();
                                        lossFunc.Loss(output[0], y[idx], loss_vec, 0);
                                        var y1 = loss_vec.Memory[0];

                                        conv_l.Weights[f_i][i_i].Memory[f_y * conv_l.FilterSz + f_x] -= 2 * delta;
                                        output = front.ForwardPropagate(x[idx]);
                                        loss_vec.Clear();
                                        lossFunc.Loss(output[0], y[idx], loss_vec, 0);
                                        var y0 = loss_vec.Memory[0];

                                        conv_l.Weights[f_i][i_i].Memory[f_y * conv_l.FilterSz + f_x] += delta;

                                        var deriv = (y1 - y0) / (2 * delta); // central difference
                                        var norm  = ((w_delta - deriv) * (w_delta - deriv)) / ((w_delta + deriv) * (w_delta + deriv));
                                        norm_conv += norm;
                                        norm_conv_net++;
                                    }
                                }
                            }
                        }

                        var fc_l = fc.CurrentLayer as FCLayer;

                        for (int i = 0; i < fc_l.Weights.Rows * fc_l.Weights.Columns; i++)
                        {
                            var w_delta = fc_l.WeightDelta.Memory[i];

                            fc_l.Weights.Memory[i] += delta;
                            var output = front.ForwardPropagate(x[idx]);
                            loss_vec.Clear();
                            lossFunc.Loss(output[0], y[idx], loss_vec, 0);
                            var y1 = loss_vec.Memory[0];

                            fc_l.Weights.Memory[i] -= 2 * delta;
                            output = front.ForwardPropagate(x[idx]);
                            loss_vec.Clear();
                            lossFunc.Loss(output[0], y[idx], loss_vec, 0);
                            var y0 = loss_vec.Memory[0];

                            fc_l.Weights.Memory[i] += delta;

                            var deriv = (y1 - y0) / (2 * delta);
                            var norm  = ((w_delta - deriv) * (w_delta - deriv)) / ((w_delta + deriv) * (w_delta + deriv));
                            norm_fc += norm;
                            norm_fc_net++;
                        }
                    }

                    {
                        back.ResetLayerErrors();
                        back.ComputeGradients(loss_vec);
                        back.ComputeLayerErrors(loss_vec);
                        back.UpdateLayers(optimizer);
                    }
                }
            }

            Console.WriteLine($"Conv Norm {norm_conv / norm_conv_net}");
            Console.WriteLine($"FC Norm {norm_fc / norm_fc_net}");
            Console.ReadLine();
        }
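For reference, each comparison above pits the analytic gradient against the central difference (y1 - y0) / (2 * delta) and accumulates a squared relative error; the same computation factored into a hypothetical helper:

        // Squared relative error between analytic and numeric derivatives,
        // matching the norm accumulated inside Check() above.
        static float GradCheckError(float analytic, float numeric)
        {
            float diff = analytic - numeric;
            float sum  = analytic + numeric;
            return (diff * diff) / (sum * sum);   // near 0 when the gradients agree
        }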