Example #1
        public void AddLayer(LayerBase layer)
        {
            int inputWidth = 0, inputHeight = 0, inputDepth = 0;

            // Infer this layer's input dimensions from the output of the current last layer.
            if (this.layers.Count > 0)
            {
                inputWidth  = this.layers[this.layers.Count - 1].OutputWidth;
                inputHeight = this.layers[this.layers.Count - 1].OutputHeight;
                inputDepth  = this.layers[this.layers.Count - 1].OutputDepth;
            }

            // Classification layers (e.g. softmax) are automatically preceded by a
            // fully connected layer with one neuron per class.
            var classificationLayer = layer as IClassificationLayer;

            if (classificationLayer != null)
            {
                var fullyConnLayer = new FullyConnLayer(classificationLayer.ClassCount);
                fullyConnLayer.Init(inputWidth, inputHeight, inputDepth);
                inputWidth  = fullyConnLayer.OutputWidth;
                inputHeight = fullyConnLayer.OutputHeight;
                inputDepth  = fullyConnLayer.OutputDepth;

                this.layers.Add(fullyConnLayer);
            }

            // Regression layers likewise get an implicit fully connected layer
            // with one neuron per regression output.
            var regressionLayer = layer as RegressionLayer;

            if (regressionLayer != null)
            {
                var fullyConnLayer = new FullyConnLayer(regressionLayer.NeuronCount);
                fullyConnLayer.Init(inputWidth, inputHeight, inputDepth);
                inputWidth  = fullyConnLayer.OutputWidth;
                inputHeight = fullyConnLayer.OutputHeight;
                inputDepth  = fullyConnLayer.OutputDepth;

                this.layers.Add(fullyConnLayer);
            }

            var dotProductLayer = layer as IDotProductLayer;

            if (dotProductLayer != null)
            {
                if (dotProductLayer.Activation == Activation.Relu)
                {
                    // ReLUs benefit from a small positive initial bias so they receive
                    // gradients early; otherwise a unit can (by chance) never turn on,
                    // never get any gradient, and never contribute anything (a dead ReLU).
                    dotProductLayer.BiasPref = 0.1;
                }
            }

            // Only the very first layer (typically an input layer) defines its own
            // dimensions; every later layer is initialized from its predecessor.
            if (this.layers.Count > 0)
            {
                layer.Init(inputWidth, inputHeight, inputDepth);
            }

            this.layers.Add(layer);

            // Append the requested activation function as a separate layer after
            // the dot-product layer.
            if (dotProductLayer != null)
            {
                switch (dotProductLayer.Activation)
                {
                case Activation.Undefined:
                    break;

                case Activation.Relu:
                    var reluLayer = new ReluLayer();
                    reluLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                    this.layers.Add(reluLayer);
                    break;

                case Activation.Sigmoid:
                    var sigmoidLayer = new SigmoidLayer();
                    sigmoidLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                    this.layers.Add(sigmoidLayer);
                    break;

                case Activation.Tanh:
                    var tanhLayer = new TanhLayer();
                    tanhLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                    this.layers.Add(tanhLayer);
                    break;

                case Activation.Maxout:
                    var maxoutLayer = new MaxoutLayer {
                        GroupSize = dotProductLayer.GroupSize
                    };
                    maxoutLayer.Init(layer.OutputWidth, layer.OutputHeight, layer.OutputDepth);
                    this.layers.Add(maxoutLayer);
                    break;

                default:
                    throw new ArgumentOutOfRangeException();
                }
            }

            var lastLayer = this.layers[this.layers.Count - 1];

            // A layer that declares a drop probability (and is not itself a dropout
            // layer) gets a DropOutLayer appended after it.
            if (!(layer is DropOutLayer) && layer.DropProb.HasValue)
            {
                var dropOutLayer = new DropOutLayer(layer.DropProb.Value);
                dropOutLayer.Init(lastLayer.OutputWidth, lastLayer.OutputHeight, lastLayer.OutputDepth);
                this.layers.Add(dropOutLayer);
            }
        }
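
A minimal usage sketch of how this AddLayer is typically driven when assembling
a small classifier. The InputLayer, ConvLayer, PoolLayer, and SoftmaxLayer
constructor signatures below follow common ConvNetSharp samples and are
assumptions here; they may differ between versions of the library:

        var net = new Net();

        // The first layer defines its own dimensions, so Init is skipped for it.
        net.AddLayer(new InputLayer(28, 28, 1));

        // ConvLayer implements IDotProductLayer; requesting Relu makes AddLayer
        // set BiasPref = 0.1 and append a ReluLayer right after it.
        net.AddLayer(new ConvLayer(5, 5, 8) { Stride = 1, Pad = 2, Activation = Activation.Relu });
        net.AddLayer(new PoolLayer(2, 2) { Stride = 2 });

        // SoftmaxLayer implements IClassificationLayer, so AddLayer inserts the
        // FullyConnLayer(10) in front of it automatically.
        net.AddLayer(new SoftmaxLayer(10));

Because every auxiliary layer (the fully connected head, the activation, the
dropout) is inserted inside AddLayer, callers only list the layers they care
about and never have to thread width, height, and depth through by hand.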