public unsafe void SoftmaxBackwardOutput()
        {
            // Build a 400x250 input matrix and a 400x127 one-hot label matrix
            float[,]
            x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 250, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 250),
            y = new float[400, 127];
            for (int i = 0; i < 400; i++)
            {
                y[i, ThreadSafeRandom.NextInt(max: 127)] = 1;
            }

            // Create a CPU softmax output layer and a cuDNN-backed copy sharing the same weights and biases
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            fixed (float* px = x, py = y)
            {
                Tensor.Reshape(px, x.GetLength(0), x.GetLength(1), out Tensor xt);
                cpu.Forward(xt, out Tensor z, out Tensor a);
                a.Duplicate(out Tensor a2);
                Tensor.Reshape(py, y.GetLength(0), y.GetLength(1), out Tensor yt);

                // Backpropagate on both layers and verify the in-place results match
                cpu.Backpropagate(a, yt, z);
                gpu.Backpropagate(a2, yt, z);
                Assert.IsTrue(a.ContentEquals(a2));
                a.Free();
                a2.Free();
                z.Free();
            }
        }
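The comparison above works because, for a softmax output layer trained with cross-entropy against a one-hot target, the gradient with respect to the pre-activations reduces to a simple difference. A minimal standalone sketch of that identity (plain C#, independent of the NeuralNetwork.NET types above; the class and method names are illustrative):

        static class SoftmaxGradientSketch
        {
            // For a softmax output with cross-entropy loss and a one-hot target y,
            // the gradient w.r.t. the pre-activations collapses to dL/dz[j] = a[j] - y[j],
            // where a is the softmax activation.
            public static float[] Gradient(float[] a, float[] y)
            {
                var dz = new float[a.Length];
                for (int j = 0; j < a.Length; j++)
                {
                    dz[j] = a[j] - y[j];
                }
                return dz;
            }
        }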
Example #2
        public override float Loss(ComputeShader compute, Signal signal, Signal answer, bool train)
        {
            // Run the network forward, then squash the logits through softmax
            var predictSig = Predict(compute, signal, train);
            var softmaxSig = softmax.Forward(compute, predictSig, train);

            predictSig.Dispose();

            // Cross-entropy between the softmax distribution and the expected answer
            return CrossEntropyError.Loss(compute, softmaxSig, answer);
        }
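For reference, the quantity this method computes is the cross-entropy of the softmax distribution against the expected answer. A minimal CPU-side sketch for a single sample, written in the numerically stable log-sum-exp form (illustrative names, not part of the compute-shader API used above):

        static class CrossEntropySketch
        {
            // Numerically stable softmax + cross-entropy for one sample:
            // shift by the max logit before exponentiating, then take the
            // negative log-probability of the expected class.
            public static double Loss(double[] logits, int answerIndex)
            {
                double max = double.MinValue;
                foreach (double v in logits) max = Math.Max(max, v);

                double sum = 0;
                foreach (double v in logits) sum += Math.Exp(v - max);

                return -(logits[answerIndex] - max - Math.Log(sum));
            }
        }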
Example #3
        public void SoftmaxLayer_Forward()
        {
            var layer = new SoftmaxLayer();

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int num = bottom.Num;
                int channels = bottom.Channels;

                // Each output row is a probability distribution, so it must sum to 1
                for (int i = 0; i < num; i++)
                {
                    double sum = 0;
                    for (int j = 0; j < channels; j++)
                    {
                        sum += topCpu.DataAt(i, j, 0, 0);
                    }

                    Assert.True(sum >= 0.999);
                    Assert.True(sum <= 1.001);
                }

                // Each output value must match exp(bottom) normalized by the row's sum of exponentials
                for (int i = 0; i < num; i++)
                {
                    double scale = 0;
                    for (int j = 0; j < channels; j++)
                    {
                        scale += Math.Exp(bottomCpu.DataAt(i, j, 0, 0));
                    }

                    for (int j = 0; j < channels; j++)
                    {
                        Assert.True(topCpu.DataAt(i, j, 0, 0) + 1e-4f >= Math.Exp(bottomCpu.DataAt(i, j, 0, 0)) / scale);
                        Assert.True(topCpu.DataAt(i, j, 0, 0) - 1e-4f <= Math.Exp(bottomCpu.DataAt(i, j, 0, 0)) / scale);
                    }
                }
            }
        }
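Both assertion loops in this test check the same closed form: each output row should equal the exponentials of the inputs divided by the row's sum of exponentials, which is also why every row sums to 1. A standalone reference implementation of exactly that formula, mirroring the unshifted computation the test verifies (illustrative names, not part of the tested layer's API):

        static class SoftmaxReference
        {
            // Direct transcription of the formula the assertions check:
            // top[j] = exp(bottom[j]) / sum_k exp(bottom[k]).
            public static double[] Softmax(double[] bottom)
            {
                double scale = 0;
                for (int j = 0; j < bottom.Length; j++) scale += Math.Exp(bottom[j]);

                var top = new double[bottom.Length];
                for (int j = 0; j < bottom.Length; j++) top[j] = Math.Exp(bottom[j]) / scale;
                return top;
            }
        }

Note that, unlike the loss sketch in Example #2, this version does not shift by the max logit; production implementations usually do, to avoid overflow on large inputs.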