Example #1: Softmax
        public void Forward(SoftmaxLayerArgument argument, ForwardContext context)
        {
            var src  = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            float max = float.MinValue;

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                max = Math.Max(max, src[oc]);
            }

            float sum = 0;

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                float value = (float)Math.Exp(src[oc] - max);
                sum     += value;
                dest[oc] = value;
            }

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                dest[oc] /= sum;
            }
        }
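The max-subtraction in Example #1 is the standard numerically stable softmax: shifting every logit by the channel maximum keeps Math.Exp from overflowing without changing the result. A minimal standalone sketch of the same computation over a plain float[] (the sample logits and the helper name Softmax are illustrative, not part of nncase):

using System;

// Numerically stable softmax over a plain float[].
static float[] Softmax(ReadOnlySpan<float> src)
{
    // Subtract the maximum so Math.Exp never sees a large positive argument, then normalize.
    float max = float.MinValue;
    foreach (var v in src)
        max = Math.Max(max, v);

    var dest = new float[src.Length];
    float sum = 0;
    for (int i = 0; i < src.Length; i++)
    {
        dest[i] = (float)Math.Exp(src[i] - max);
        sum += dest[i];
    }
    for (int i = 0; i < dest.Length; i++)
        dest[i] /= sum;
    return dest;
}

// Large logits would overflow a naive exp(); here the result is roughly { 0.09, 0.245, 0.665 } and sums to 1.
Console.WriteLine(string.Join(", ", Softmax(new float[] { 1000f, 1001f, 1002f })));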
Example #2: ResizeNearestNeighbor
        public void Forward(ResizeNearestNeighborLayerArgument argument, ForwardContext context)
        {
            var src  = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            float heightScale = (float)argument.InputHeight / argument.OutputHeight;
            float widthScale  = (float)argument.InputWidth / argument.OutputWidth;

            int destIdx = 0;

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                var channelSrc = src.Slice((int)(argument.InputWidth * argument.InputHeight * oc));
                for (int oy = 0; oy < argument.OutputHeight; oy++)
                {
                    var inY     = (int)Math.Min(Math.Floor(oy * heightScale), argument.InputHeight - 1);
                    var yOrigin = channelSrc.Slice(inY * (int)argument.InputWidth);
                    for (int ox = 0; ox < argument.OutputWidth; ox++)
                    {
                        var inX = (int)Math.Min(Math.Floor(ox * widthScale), argument.InputWidth - 1);
                        dest[destIdx++] = yOrigin[inX];
                    }
                }
            }
        }
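The resize uses an input/output scale per axis, so each output coordinate maps to floor(o * scale), clamped to the last input index. A small sketch of just that index mapping for one axis (the helper name and the sizes are made up for illustration):

using System;

// Nearest-neighbour index mapping for one axis, as used above.
static int[] NearestIndices(int inLen, int outLen)
{
    float scale = (float)inLen / outLen;   // input/output, matching the layer above
    var map = new int[outLen];
    for (int o = 0; o < outLen; o++)
        map[o] = (int)Math.Min(Math.Floor(o * scale), inLen - 1);
    return map;
}

// Upscaling 3 -> 6 repeats every source pixel twice: 0, 0, 1, 1, 2, 2
Console.WriteLine(string.Join(", ", NearestIndices(3, 6)));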
Example #3: Logistic
        public void Forward(LogisticLayerArgument argument, ForwardContext context)
        {
            var src  = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                dest[oc] = 1f / (1f + (float)Math.Exp(-src[oc]));
            }
        }
Example #4: Requantize
        public void Forward(RequantizeLayerArgument argument, ForwardContext context)
        {
            var src  = context.GetMainRamAt((int)argument.MainMemoryInputAddress);
            var dest = context.GetMainRamAt((int)argument.MainMemoryOutputAddress);

            for (int i = 0; i < argument.Count; i++)
            {
                dest[i] = argument.Table[src[i]];
            }
        }
Example #5: K210RemovePadding
        public void Forward(K210RemovePaddingLayerArgument argument, ForwardContext context)
        {
            var src  = context.GetMainRamAt((int)argument.MainMemoryInputAddress);
            var dest = context.GetMainRamAt((int)argument.MainMemoryOutputAddress);

            // The input keeps the KPU's 16-byte channel stride; copy one byte per channel
            // and drop the padding.
            for (int oc = 0; oc < argument.Channels; oc++)
            {
                dest[oc] = src[oc * 16];
            }
        }
Example #6: Dequantize
        public void Forward(DequantizeLayerArgument argument, ForwardContext context)
        {
            var src  = context.GetMainRamAt((int)argument.MainMemoryInputAddress);
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));
            var q    = argument.QuantParam;

            for (int i = 0; i < argument.Count; i++)
            {
                dest[i] = src[i] * q.Scale + q.Bias;
            }
        }
Example #7: Add
File: Add.cs  Project: xiuyu999/nncase
        public void Forward(AddLayerArgument argument, ForwardContext context)
        {
            var srcA = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAAddress));
            var srcB = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputBAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            for (int oc = 0; oc < argument.Count; oc++)
            {
                dest[oc] = srcA[oc] + srcB[oc];
            }
        }
Example #8: Concatenation
        public void Forward(ConcatenationLayerArgument argument, ForwardContext context)
        {
            var dest = context.GetMainRamAt((int)argument.MainMemoryOutputAddress);

            foreach (var input in argument.InputsMainMemory)
            {
                var src = context.GetMainRamAt((int)input.Start, (int)input.Size);
                src.CopyTo(dest);
                dest = dest.Slice((int)input.Size);
            }
        }
Example #9: Quantize
        public void Forward(QuantizeLayerArgument argument, ForwardContext context)
        {
            var   src   = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var   dest  = context.GetMainRamAt((int)argument.MainMemoryOutputAddress);
            var   q     = argument.QuantParam;
            float scale = 1f / q.Scale;

            for (int i = 0; i < argument.Count; i++)
            {
                int value = (int)Math.Round((src[i] - q.Bias) * scale);
                dest[i] = (byte)FxExtensions.Clamp(value, 0, 0xFF);
            }
        }
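This is the inverse of the dequantize in Example #6 (value * Scale + Bias): subtract the bias, divide by the scale, round to nearest, and clamp to the byte range. A minimal round-trip sketch with made-up quantization parameters; Math.Clamp stands in for FxExtensions.Clamp here:

using System;

const float Scale = 0.05f;   // illustrative quantization parameters, not from a real model
const float Bias  = -1.0f;

byte Quantize(float x) => (byte)Math.Clamp((int)Math.Round((x - Bias) / Scale), 0, 0xFF);
float Dequantize(byte q) => q * Scale + Bias;

// 0.5 lies exactly on the 0.05-wide grid, so it survives the round trip;
// off-grid values come back within Scale / 2.
Console.WriteLine(Dequantize(Quantize(0.5f)));   // 0.5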
Example #10: FullyConnected
        public void Forward(FullyConnectedLayerArgument argument, ForwardContext context)
        {
            var src = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            for (int oc = 0; oc < argument.OutputChannels; oc++)
            {
                var weights = new ReadOnlySpan<float>(argument.Weights, (int)(oc * argument.InputChannels), (int)argument.InputChannels);
                float sum = 0;
                for (int ic = 0; ic < argument.InputChannels; ic++)
                    sum += src[ic] * weights[ic];
                dest[oc] = sum + argument.Bias[oc];
            }
        }
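Each output channel is a dot product of the input vector with one row of the weight matrix, plus a bias: y = W·x + b with the weights stored row-major as [OutputChannels, InputChannels]. A tiny worked example with made-up numbers:

using System;

// y = W·x + b with W stored row-major, one row of 2 weights per output channel.
float[] x = { 1f, 2f };
float[] w = { 1f, 0f,     // weights for output 0
              0f, 1f,     // weights for output 1
              1f, 1f };   // weights for output 2
float[] b = { 0.5f, 0.5f, 0.5f };

var y = new float[3];
for (int oc = 0; oc < 3; oc++)
{
    float sum = 0;
    for (int ic = 0; ic < 2; ic++)
        sum += x[ic] * w[oc * 2 + ic];
    y[oc] = sum + b[oc];
}
Console.WriteLine(string.Join(", ", y));   // 1.5, 2.5, 3.5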
Example #11: K210Upload
        public void Forward(K210UploadLayerArgument argument, ForwardContext context)
        {
            var src  = context.GetMainRamAt((int)argument.MainMemoryInputAddress);
            var dest = context.GetKpuRamAt((int)argument.KPUMemoryOutputAddress);

            K210Helper.KpuUpload(dest, src, (int)argument.Width, (int)argument.Height, (int)argument.Channels);
        }
Example #12: GlobalAveragePool2d
        public void Forward(GlobalAveragePool2dLayerArgument argument, ForwardContext context)
        {
            var src  = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            int i = 0;

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                float sum = 0;
                for (int x = 0; x < argument.KernelSize; x++)
                {
                    sum += src[i++];
                }
                dest[oc] = sum / argument.KernelSize;
            }
        }
Example #13: ChannelwiseDequantize
        public void Forward(ChannelwiseDequantizeLayerArgument argument, ForwardContext context)
        {
            var src  = context.GetMainRamAt((int)argument.MainMemoryInputAddress);
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            int idx = 0;

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                var q = argument.QuantParams[oc];
                for (int i = 0; i < argument.ChannelSize; i++)
                {
                    dest[idx] = src[idx] * q.Scale + q.Bias;
                    idx++;
                }
            }
        }
Example #14: TensorflowFlatten
        public void Forward(TensorflowFlattenLayerArgument argument, ForwardContext context)
        {
            var src  = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            int i = 0;

            for (int oy = 0; oy < argument.Height; oy++)
            {
                for (int ox = 0; ox < argument.Width; ox++)
                {
                    for (int oc = 0; oc < argument.Channels; oc++)
                    {
                        dest[i++] = src[(int)((oc * argument.Height + oy) * argument.Width + ox)];
                    }
                }
            }
        }
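The nested loops convert the channel-major (CHW) layout used by the other kernels into the HWC order that TensorFlow's Flatten expects: the destination index advances height → width → channel, while the source index is (oc * Height + oy) * Width + ox. A short sketch that prints the permutation for illustrative sizes:

using System;

// CHW -> HWC index mapping as above, for a 2x2 image with 3 channels.
int height = 2, width = 2, channels = 3;
int i = 0;
for (int oy = 0; oy < height; oy++)
    for (int ox = 0; ox < width; ox++)
        for (int oc = 0; oc < channels; oc++)
            Console.WriteLine($"dest[{i++}] = src[{(oc * height + oy) * width + ox}]");
// First lines: dest[0] = src[0], dest[1] = src[4], dest[2] = src[8], dest[3] = src[1], ...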
Example #15: AveragePool2d
        public void Forward(AveragePool2dLayerArgument argument, ForwardContext context)
        {
            var src  = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            int outIdx = 0;

            for (int oc = 0; oc < argument.OutputChannels; oc++)
            {
                var channelSrc = src.Slice((int)(argument.InputWidth * argument.InputHeight * oc));
                for (int oy = 0; oy < argument.OutputHeight; oy++)
                {
                    for (int ox = 0; ox < argument.OutputWidth; ox++)
                    {
                        int   inXOrigin    = (int)(ox * argument.StrideWidth) - (int)argument.PaddingWidth;
                        int   inYOrigin    = (int)(oy * argument.StrideHeight) - (int)argument.PaddingHeight;
                        int   kernelXStart = Math.Max(0, -inXOrigin);
                        int   kernelXEnd   = Math.Min((int)argument.KernelWidth, (int)argument.InputWidth - inXOrigin);
                        int   kernelYStart = Math.Max(0, -inYOrigin);
                        int   kernelYEnd   = Math.Min((int)argument.KernelHeight, (int)argument.InputHeight - inYOrigin);
                        float value        = 0;
                        float kernelCount  = 0;

                        for (int ky = kernelYStart; ky < kernelYEnd; ky++)
                        {
                            for (int kx = kernelXStart; kx < kernelXEnd; kx++)
                            {
                                int inX = inXOrigin + kx;
                                int inY = inYOrigin + ky;
                                value += channelSrc[inY * (int)argument.InputWidth + inX];
                                kernelCount++;
                            }
                        }

                        dest[outIdx++] = value / kernelCount;
                    }
                }
            }
        }
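The kernelStart/kernelEnd clamping limits each window to the taps that actually fall inside the input, and kernelCount counts only those taps, so border outputs are averaged over the smaller in-bounds window rather than over KernelWidth * KernelHeight. A sketch of the bounds for one corner pixel, with made-up kernel, stride, padding and input sizes:

using System;

// Window bounds for the top-left output pixel: 3x3 kernel, stride 1, padding 1, 8x8 input.
int kernel = 3, stride = 1, pad = 1, inW = 8, inH = 8;
int ox = 0, oy = 0;
int inXOrigin = ox * stride - pad;               // -1: the window starts outside the input
int inYOrigin = oy * stride - pad;               // -1
int xStart = Math.Max(0, -inXOrigin);            // 1
int xEnd   = Math.Min(kernel, inW - inXOrigin);  // 3
int yStart = Math.Max(0, -inYOrigin);            // 1
int yEnd   = Math.Min(kernel, inH - inYOrigin);  // 3
int taps   = (xEnd - xStart) * (yEnd - yStart);
Console.WriteLine(taps);                         // 4: the corner average divides by 4, not 9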
Example #16: L2Normalization
        public void Forward(L2NormalizationLayerArgument argument, ForwardContext context)
        {
            var src  = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryInputAddress));
            var dest = MemoryMarshal.Cast<byte, float>(context.GetMainRamAt((int)argument.MainMemoryOutputAddress));

            float       sum     = 0;
            const float epsilon = 1e-10f;

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                sum += src[oc] * src[oc];
            }
            if (sum < epsilon)
            {
                sum = epsilon;
            }
            sum = 1f / (float)Math.Sqrt(sum);

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                dest[oc] = src[oc] * sum;
            }
        }
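In other words, dest[i] = src[i] / sqrt(max(sum of src[j]^2, 1e-10)); the epsilon floor only matters for an (almost) all-zero input, where it prevents division by zero. For example, with src = {3, 4} the sum of squares is 25, the reciprocal norm is 1/5 = 0.2, and dest = {0.6, 0.8}, a unit-length vector.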
Example #17: K210AddPadding
        public void Forward(K210AddPaddingLayerArgument argument, ForwardContext context)
        {
            var src  = context.GetMainRamAt((int)argument.MainMemoryInputAddress);
            var dest = context.GetKpuRamAt((int)argument.KPUMemoryOutputAddress);

            var height = 4;

            // KPU RAM layout constants: four channels share each 64-byte row, 16 bytes apart,
            // and each group of four starts rowLength * height * 64 bytes after the previous one.
            (var groups, var rowLength, var rowPadding) = (4, 1, 16);
            int srcIdx = 0;

            for (int oc = 0; oc < argument.Channels; oc++)
            {
                var channel_origin = oc / groups * rowLength * height * 64 + oc % groups * rowPadding;

                // The y and x loops each run once: only the first byte of every channel's
                // padded slot receives a value from the source.
                for (int y = 0; y < 1; y++)
                {
                    var y_origin = channel_origin + y * rowLength * 64;
                    for (int x = 0; x < 1; x++)
                    {
                        dest[y_origin + x] = src[srcIdx++];
                    }
                }
            }
        }