Example #1
        public void AddLayer(LayerBase layer)
        {
            int inputWidth = 0, inputHeight = 0, inputDepth = 0;
            if (this.layers.Count > 0)
            {
                inputWidth = this.layers[this.layers.Count - 1].OutputWidth;
                inputHeight = this.layers[this.layers.Count - 1].OutputHeight;
                inputDepth = this.layers[this.layers.Count - 1].OutputDepth;
            }

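            // Classification layers expose a ClassCount; automatically insert a FullyConnLayer with that many neurons in front of them.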
            var classificationLayer = layer as IClassificationLayer;
            if (classificationLayer != null)
            {
                var fullyConnLayer = new FullyConnLayer(classificationLayer.ClassCount);
                fullyConnLayer.Init(inputWidth, inputHeight, inputDepth);
                inputWidth = fullyConnLayer.OutputWidth;
                inputHeight = fullyConnLayer.OutputHeight;
                inputDepth = fullyConnLayer.OutputDepth;

                this.layers.Add(fullyConnLayer);
            }

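            // Regression layers likewise get a FullyConnLayer inserted in front of them, sized to their NeuronCount.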
            var regressionLayer = layer as RegressionLayer;
            if (regressionLayer != null)
            {
                var fullyConnLayer = new FullyConnLayer(regressionLayer.NeuronCount);
                fullyConnLayer.Init(inputWidth, inputHeight, inputDepth);
                inputWidth = fullyConnLayer.OutputWidth;
                inputHeight = fullyConnLayer.OutputHeight;
                inputDepth = fullyConnLayer.OutputDepth;

                this.layers.Add(fullyConnLayer);
            }

            var dotProductLayer = layer as IDotProductLayer;
            if (dotProductLayer != null)
            {
                if (dotProductLayer.Activation == Activation.Relu)
                {
                    dotProductLayer.BiasPref = 0.1; // ReLUs like a bit of positive bias so that gradients flow early on;
                    // otherwise a ReLU unit can, by chance, never turn on, never receive any gradient,
                    // and never contribute any computation: a dead ReLU.
                }
            }

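            // Initialize the new layer from the output dimensions of whatever layer now precedes it (the very first layer is not initialized here).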
            if (this.layers.Count > 0)
            {
                layer.Init(inputWidth, inputHeight, inputDepth);
            }

            this.layers.Add(layer);

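            // Dot-product layers can request a non-linearity; append the corresponding activation layer right after the layer just added.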
            if (dotProductLayer != null)
            {
                switch (dotProductLayer.Activation)
                {
                    case Activation.Undefined:
                        break;
                    case Activation.Relu:
                        var reluLayer = new ReluLayer();
                        reluLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                        this.layers.Add(reluLayer);
                        break;
                    case Activation.Sigmoid:
                        var sigmoidLayer = new SigmoidLayer();
                        sigmoidLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                        this.layers.Add(sigmoidLayer);
                        break;
                    case Activation.Tanh:
                        var tanhLayer = new TanhLayer();
                        tanhLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                        this.layers.Add(tanhLayer);
                        break;
                    case Activation.Maxout:
                        var maxoutLayer = new MaxoutLayer { GroupSize = dotProductLayer.GroupSize };
                        maxoutLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                        this.layers.Add(maxoutLayer);
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
            }

            var lastLayer = this.layers[this.layers.Count - 1];

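            // Layers that specify a DropProb (and are not themselves DropOutLayers) get a DropOutLayer appended after them and after any activation layer just added.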
            if (!(layer is DropOutLayer) && layer.DropProb.HasValue)
            {
                var dropOutLayer = new DropOutLayer(layer.DropProb.Value);
                dropOutLayer.Init(lastLayer.OutputWidth, lastLayer.OutputHeight, lastLayer.OutputDepth);
                this.layers.Add(dropOutLayer);
            }
        }
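
A minimal usage sketch for the version above. It assumes the method lives on a network class called Net, that FullyConnLayer implements IDotProductLayer with a settable Activation property, and that InputLayer and SoftmaxLayer types exist with the constructors shown; the real class names and signatures may differ. The point is that the FullyConnLayer in front of the softmax and the ReluLayer after the hidden layer are appended automatically.

        // Hypothetical usage sketch; class names, constructors, and settable properties are assumptions.
        var net = new Net();
        net.AddLayer(new InputLayer(28, 28, 1));                               // defines the input volume
        net.AddLayer(new FullyConnLayer(20) { Activation = Activation.Relu }); // a ReluLayer is appended automatically
        net.AddLayer(new SoftmaxLayer(10));                                    // a FullyConnLayer(10) is inserted in front automatically
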
Example #2
        public void AddLayer(LayerBase layer)
        {
            int       inputWidth = 0, inputHeight = 0, inputDepth = 0;
            LayerBase lastLayer = null;

            if (this.Layers.Count > 0)
            {
                inputWidth  = this.Layers[this.Layers.Count - 1].OutputWidth;
                inputHeight = this.Layers[this.Layers.Count - 1].OutputHeight;
                inputDepth  = this.Layers[this.Layers.Count - 1].OutputDepth;
                lastLayer   = this.Layers[this.Layers.Count - 1];
            }
            else if (!(layer is InputLayer))
            {
                throw new ArgumentException("First layer should be an InputLayer");
            }

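            // This version inserts nothing automatically: a classification layer must directly follow a FullyConnLayer with ClassCount neurons.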
            var classificationLayer = layer as IClassificationLayer;

            if (classificationLayer != null)
            {
                var fullconLayer = lastLayer as FullyConnLayer;
                if (fullconLayer == null)
                {
                    throw new ArgumentException($"Previously added layer should be a FullyConnLayer with {classificationLayer.ClassCount} neurons");
                }

                if (fullconLayer.NeuronCount != classificationLayer.ClassCount)
                {
                    throw new ArgumentException($"Previous FullyConnLayer should have {classificationLayer.ClassCount} neurons");
                }
            }

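            // Likewise, a regression layer must directly follow a FullyConnLayer.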
            var regressionLayer = layer as RegressionLayer;

            if (regressionLayer != null)
            {
                var fullconLayer = lastLayer as FullyConnLayer;
                if (fullconLayer == null)
                {
                    throw new ArgumentException("Previously added layer should be a FullyConnLayer");
                }
            }

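            // Adding a ReluLayer right after a dot-product layer gives that layer a small positive bias preference.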
            var reluLayer = layer as ReluLayer;

            if (reluLayer != null)
            {
                var dotProductLayer = lastLayer as IDotProductLayer;
                if (dotProductLayer != null)
                {
                    dotProductLayer.BiasPref = 0.1; // ReLUs like a bit of positive bias so that gradients flow early on;
                    // otherwise a ReLU unit can, by chance, never turn on, never receive any gradient,
                    // and never contribute any computation: a dead ReLU.
                }
            }

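            // Initialize the new layer from the previous layer's output dimensions and append it.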
            if (this.Layers.Count > 0)
            {
                layer.Init(inputWidth, inputHeight, inputDepth);
            }

            this.Layers.Add(layer);
        }
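
The same kind of sketch for this stricter version. Nothing is inserted automatically, so the caller must start with an InputLayer and add the FullyConnLayer in front of the classification layer explicitly; again, the class names and constructor arguments are assumptions.

        // Hypothetical usage sketch; class names and constructor arguments are assumptions.
        var net = new Net();
        net.AddLayer(new InputLayer(28, 28, 1));  // the first layer must be an InputLayer
        net.AddLayer(new FullyConnLayer(10));     // required in front of the classification layer, with ClassCount neurons
        net.AddLayer(new SoftmaxLayer(10));       // validated against the preceding FullyConnLayer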