public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            base.Setup(bottom, top);

            int height = bottom[0].Height;
            int width  = bottom[0].Width;

            int pooledHeight = (int)(Math.Ceiling((double)(height + 2 * this.Parameters.Padding.Height - this.Parameters.Kernel.Height) / this.Parameters.Stride.Height) + 1);
            int pooledWidth  = (int)(Math.Ceiling((double)(width + 2 * this.Parameters.Padding.Width - this.Parameters.Kernel.Width) / this.Parameters.Stride.Width) + 1);

            Debug.Assert((pooledHeight - 1) * this.Parameters.Stride.Height <= height + this.Parameters.Padding.Height);
            Debug.Assert((pooledWidth - 1) * this.Parameters.Stride.Width <= width + this.Parameters.Padding.Width);

            if (this.Parameters.Padding.Height != 0 || this.Parameters.Padding.Width != 0)
            {
                // If we have padding, ensure that the last pooling starts strictly
                // inside the image (instead of at the padding); otherwise clip the last.
                if ((pooledHeight - 1) * this.Parameters.Stride.Height >= height + this.Parameters.Padding.Height)
                {
                    pooledHeight--;
                }

                if ((pooledWidth - 1) * this.Parameters.Stride.Width >= width + this.Parameters.Padding.Width)
                {
                    pooledWidth--;
                }
            }

            this.Pooled = new Size(pooledHeight, pooledWidth);
        }
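        // Worked example of the arithmetic above (hypothetical numbers): with height = 7,
        // kernel = 3, stride = 2 and no padding, pooledHeight = ceil((7 - 3) / 2) + 1 = 3,
        // i.e. pooling windows starting at rows 0, 2 and 4. With height = 4, padding = 1,
        // kernel = 3 and stride = 2, pooledHeight = ceil((4 + 2 - 3) / 2) + 1 = 3, and the
        // clipping test (3 - 1) * 2 = 4 < 4 + 1 = 5 keeps every window starting inside the
        // padded input, so no dimension is decremented.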
        protected override void CheckBlobCount(TensorCollection bottom, TensorCollection top)
        {
            base.CheckBlobCount(bottom, top);

            Guard.That(() => bottom).IsTrue(x => x[0].Channels == bottom[1].Channels, "Channels in both bottom blobs must be equal.");
            Guard.That(() => bottom).IsTrue(x => x[0].Height == bottom[1].Height, "Height in both bottom blobs must be equal.");
            Guard.That(() => bottom).IsTrue(x => x[0].Width == bottom[1].Width, "Width in both bottom blobs must be equal.");
        }
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            base.Setup(bottom, top);

            if (!top.Any())
            {
                top.Add(new Tensor(bottom[0]));
            }
        }
        public virtual void Setup(TensorCollection bottom, TensorCollection top)
        {
            Contract.Requires(bottom != null);
            Contract.Requires(top != null);

            Guard.That(() => bottom).IsNotNull();
            Guard.That(() => top).IsNotNull();

            CheckBlobCount(bottom, top);
        }
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            CheckSizeParameters();

            this._maxTopBlobs = 1;

            base.Setup(bottom, top);

            int channels = bottom[0].Channels;

            top[0].Reshape(bottom[0].Num, channels, Pooled.Height, Pooled.Width);
        }
        protected virtual void CheckBlobCount(TensorCollection bottom, TensorCollection top)
        {
            // Bottom blobs
            if (ExactNumBottomBlobs >= 0 && bottom.Count != ExactNumBottomBlobs)
            {
                throw new ArgumentException(string.Format("{0} Layer takes {1} bottom blob(s) as input.", this.GetType().Name, this.ExactNumBottomBlobs));
            }

            if (MinBottomBlobs >= 0 && bottom.Count < MinBottomBlobs)
            {
                throw new ArgumentOutOfRangeException(string.Format("{0} Layer takes at least {1} bottom blob(s) as input.", this.GetType().Name, this.MinBottomBlobs));
            }

            if (MaxBottomBlobs >= 0 && bottom.Count > MaxBottomBlobs)
            {
                throw new ArgumentOutOfRangeException(string.Format("{0} Layer takes at most {1} bottom blob(s) as input.", this.GetType().Name, this.MaxBottomBlobs));
            }

            // Top blobs
            if (ExactNumTopBlobs >= 0 && top.Count != ExactNumTopBlobs)
            {
                throw new ArgumentException(string.Format("{0} Layer produces {1} top blob(s) as output.", this.GetType().Name, this.ExactNumTopBlobs));
            }

            if (MinTopBlobs >= 0 && top.Count < MinTopBlobs)
            {
                throw new ArgumentOutOfRangeException(string.Format("{0} Layer produces at least {1} top blob(s) as output.", this.GetType().Name, this.MinTopBlobs));
            }

            if (MaxTopBlobs >= 0 && top.Count > MaxTopBlobs)
            {
                throw new ArgumentOutOfRangeException(string.Format("{0} Layer produces at most {1} top blob(s) as output.", this.GetType().Name, this.MaxTopBlobs));
            }
        }
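        // Worked example of the checks above (hypothetical layer): a loss layer reporting
        // ExactNumBottomBlobs == 2 (predictions and labels) and MaxTopBlobs == 1 throws
        // ArgumentException when handed a single bottom blob and ArgumentOutOfRangeException
        // when handed two top blobs; limits reported as negative values are skipped entirely.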
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            base.Setup(bottom, top);

            var bottomBlob = bottom.First();
            var topBlob    = top.First();

            topBlob.ReshapeAs(bottomBlob);

            this.cache = Vector<double>.Build.Dense(bottom[0].Count / bottom[0].Num);

            this.scaleVector = Vector<double>.Build.Dense(bottomBlob.Num);
        }
        public void Setup(Tensor bottom, Tensor top)
        {
            Contract.Requires(bottom != null);
            Contract.Requires(top != null);

            var bottomList = new TensorCollection {
                bottom
            };
            var topList = new TensorCollection {
                top
            };

            this.Setup(bottomList, topList);
        }
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            base.Setup(bottom, top);

            // NeuronLayer allows in-place computations. If the computation is not
            // in-place, we will need to initialize the top blob.
            var bottomBlob = bottom.First();
            var topBlob    = top.First();

            if (bottomBlob != topBlob)
            {
                topBlob.ReshapeAs(bottomBlob);
            }
        }
        public double Forward(Tensor bottom, Tensor top)
        {
            Contract.Requires(bottom != null && top != null);

            Guard.That(() => bottom).IsNotNull();
            Guard.That(() => top).IsNotNull();

            var bottomList = new TensorCollection {
                bottom
            };
            var topList = new TensorCollection {
                top
            };

            return this.Forward(bottomList, topList);
        }
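        // A minimal usage sketch (assuming the enclosing abstract class is named Layer;
        // ReluLayer, 'bottom' and 'top' are hypothetical): the single-tensor overloads above
        // simply wrap their arguments in one-element TensorCollections, so a caller can drive
        // a layer with one blob pair and read back the scalar loss.
        //
        //     Layer layer = new ReluLayer();
        //     layer.Setup(bottom, top);
        //     double loss = layer.Forward(bottom, top);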
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            base.Setup(bottom, top);
            softmaxLayer.Setup(bottom, new TensorCollection {
                probability
            });

            // Softmax loss (averaged across the batch).
            if (top.Count >= 1)
            {
                top[0].Reshape(1, 1, 1, 1);
            }

            // Also adds the softmax function output.
            if (top.Count == 2)
            {
                top[1].Reshape(bottom[0].Num, bottom[0].Channels, bottom[0].Height, bottom[0].Width);
            }
        }
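        // Note on the reshapes above: top[0], when present, holds the scalar loss averaged
        // across the batch, and an optional second top blob exposes the softmax probabilities
        // with the same shape as bottom[0].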
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            CheckSizeParameters();

            this._maxTopBlobs = 1;

            base.Setup(bottom, top);

            int channels = bottom[0].Channels;

            top[0].Reshape(bottom[0].Num, channels, Pooled.Height, Pooled.Width);

            using (var topCpu = top[0].OnCpu())
            {
                this.randomIndexes = Vector<double>.Build.SameAs(topCpu.Data);

                var distribution = new ContinuousUniform(0, 1);
                randomIndexes.MapInplace(x => distribution.Sample(), Zeros.Include);
            }
        }
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            CheckSizeParameters();

            this._maxTopBlobs = 2;

            base.Setup(bottom, top);

            int num      = bottom[0].Num;
            int channels = bottom[0].Channels;

            foreach (var item in top)
            {
                item.Reshape(num, channels, Pooled.Height, Pooled.Width);
            }

            using (var topCpu = top[0].OnCpu())
            {
                this.maxIndexes = Vector<double>.Build.SameAs(topCpu.Data);
            }
        }
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            base.Setup(bottom, top);

            this.n = Parameters.Outputs;
            this.m = bottom[0].Num;
            this.k = bottom[0].Count / bottom[0].Num;

            // Reshape the output
            top[0].Reshape(m, n, 1, 1);

            if (this.weights == null || this.bias == null)
            {
                // Fill the weights
                this.weights = new Tensor(1, 1, n, k);
                var weightsFiller = FillerFactory.Create(Parameters.WeightsFiller);
                weightsFiller.Fill(weights);

                // If necessary, initialize and fill the bias term
                if (Parameters.UseBias)
                {
                    this.bias = new Tensor(1, 1, 1, n);
                    var biasFiller = FillerFactory.Create(Parameters.BiasFiller);
                    biasFiller.Fill(bias);
                }
            }
            else
            {
                // Weights and bias already exist; TODO: log that parameter initialization is skipped.
            }

            if (Parameters.UseBias)
            {
                this.biasMultiplier = new Tensor(1, 1, 1, m);
                using (var biasMultiplierCpu = this.biasMultiplier.OnCpu())
                {
                    biasMultiplierCpu.Data.Map(v => 1, biasMultiplierCpu.Data, Zeros.Include);
                }
            }
        }
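        // Worked example of the shapes set up above (hypothetical numbers): for a bottom blob
        // with Num = 32 and Count = 32 * 784, and Parameters.Outputs = 10, we get m = 32,
        // k = 784 and n = 10, so the weights tensor is 1x1x10x784, the bias is 1x1x1x10, the
        // all-ones bias multiplier is 1x1x1x32, and top[0] is reshaped to 32x10x1x1.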
        public override void Setup(TensorCollection bottom, TensorCollection top)
        {
            base.Setup(bottom, top);
        }
        public void Backward(TensorCollection top, IList<bool> propagateDown, TensorCollection bottom)
        {
            Contract.Requires(bottom != null && top != null);
            Contract.Requires(bottom.Count > 0 && top.Count > 0);
            Contract.Requires(propagateDown != null);
            Contract.Requires(Contract.ForAll<Tensor>(bottom, x => x != null));
            Contract.Requires(Contract.ForAll<Tensor>(top, x => x != null));

            // TODO Fail if not initialized.

            Guard.That(() => bottom).IsNotNull();
            Guard.That(() => top).IsNotNull();
            Guard.That(() => propagateDown).IsNotNull();

#if EXHAUSTIVE_DEBUG
            Guard.That(() => bottom).IsTrue(x => !x.Contains(null), "Cannot contain null.");
            Guard.That(() => top).IsTrue(x => !x.Contains(null), "Cannot contain null.");
#endif

            switch (Context.Instance.Mode)
            {
            case ExecutionModeType.Automatic:
            {
                if (backwardGpuSupported)
                {
                    using (var bottomGpu = bottom.OnGpu())
                    using (var topGpu = top.OnGpu())
                    {
                        try
                        {
                            BackwardGpu(topGpu, propagateDown, bottomGpu);
                            return;
                        }
                        catch (NotSupportedException)
                        {
                            // The GPU implementation is not available for this layer;
                            // remember that and fall through to the CPU path below.
                            backwardGpuSupported = false;
                        }
                    }
                }

                using (var bottomCpu = bottom.OnCpu())
                using (var topCpu = top.OnCpu())
                {
                    BackwardCpu(topCpu, propagateDown, bottomCpu);
                }

                return;
            }

            case ExecutionModeType.Gpu:
            {
                using (var bottomGpu = bottom.OnGpu())
                using (var topGpu = top.OnGpu())
                {
                    BackwardGpu(topGpu, propagateDown, bottomGpu);
                }

                return;
            }

            case ExecutionModeType.Cpu:
            {
                using (var bottomCpu = bottom.OnCpu())
                using (var topCpu = top.OnCpu())
                {
                    BackwardCpu(topCpu, propagateDown, bottomCpu);
                }

                return;
            }

            default:
                throw new NotSupportedException(string.Format("Mode of operation '{0}' is not supported.", Context.Instance.Mode));
            }
        }
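        // A minimal usage sketch (hypothetical 'layer', 'top' and 'bottom'; assumes
        // Context.Instance.Mode is writable): forcing the CPU path for one backward pass.
        // With ExecutionModeType.Automatic the GPU implementation is tried first and the
        // CPU path serves as the fallback, as in the switch above.
        //
        //     Context.Instance.Mode = ExecutionModeType.Cpu;
        //     layer.Backward(top, new List<bool> { true }, bottom);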