Example #1
        public void SoftmaxLayer_BackwardGradient()
        {
            var layer = new SoftmaxLayer();

            var checker = new GradientChecker(1e-2f, 1e-3f);
            checker.CheckExhaustive(layer, bottom, top);
        }
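
For context, the two GradientChecker arguments here are (as in the Caffe checker these tests mirror) the finite-difference step size (1e-2) and the allowed error threshold (1e-3). A minimal, library-independent sketch of the underlying idea, applied to a scalar function rather than a layer:

using System;

// Compare an analytic derivative against a central finite difference,
// which is what a gradient checker does for every layer parameter.
static class GradientCheckSketch
{
    static void Main()
    {
        const float step = 1e-2f;      // finite-difference step, as in the test above
        const float threshold = 1e-3f; // maximum tolerated absolute error

        Func<float, float> f  = x => x * x * x;  // f(x)  = x^3
        Func<float, float> df = x => 3 * x * x;  // f'(x) = 3x^2, the "analytic gradient"

        for (float x = -1f; x <= 1f; x += 0.25f)
        {
            float numeric = (f(x + step) - f(x - step)) / (2 * step);
            bool ok = Math.Abs(numeric - df(x)) <= threshold;
            Console.WriteLine($"x = {x,5:F2}  analytic = {df(x):F4}  numeric = {numeric:F4}  ok = {ok}");
        }
    }
}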
Example #2
        public static SoftmaxLayer<T> Softmax<T>(this LayerBase<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            var softMax = new SoftmaxLayer<T>();
            softMax.AcceptParent(layer);

            return softMax;
        }
Example #3
        public unsafe void SoftmaxBackwardOutput()
        {
            float[,]
                x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 250, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 250),
                y = new float[400, 127];
            for (int i = 0; i < 400; i++)
            {
                y[i, ThreadSafeRandom.NextInt(max: 127)] = 1;
            }
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            fixed (float* px = x, py = y)
            {
                Tensor.Reshape(px, x.GetLength(0), x.GetLength(1), out Tensor xt);
                cpu.Forward(xt, out Tensor z, out Tensor a);
                a.Duplicate(out Tensor a2);
                Tensor.Reshape(py, y.GetLength(0), y.GetLength(1), out Tensor yt);
                cpu.Backpropagate(a, yt, z);
                gpu.Backpropagate(a2, yt, z);
                Assert.IsTrue(a.ContentEquals(a2));
                a.Free();
                a2.Free();
                z.Free();
            }
        }
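
This test follows the CPU/GPU parity pattern used throughout these NeuralNetwork.NET examples: run identical inputs and weights through a CPU layer and a cuDNN layer, then assert the results agree element-wise. A library-independent sketch of that comparison (this ContentEquals is a stand-in for the Tensor method used above, with an assumed 1e-4 tolerance):

using System;

static class ParitySketch
{
    // Element-wise equality within an absolute tolerance.
    static bool ContentEquals(float[] a, float[] b, float tol = 1e-4f)
    {
        if (a.Length != b.Length) return false;
        for (int i = 0; i < a.Length; i++)
            if (Math.Abs(a[i] - b[i]) > tol) return false;
        return true;
    }

    static void Main()
    {
        // Two results standing in for the cpu and gpu layer outputs.
        float[] reference = { 0.25f, 0.75f };
        float[] candidate = { 0.250001f, 0.749999f };
        Console.WriteLine(ContentEquals(reference, candidate)); // True
    }
}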
Example #4
File: RNN.cs Project: deTrident/RNNSharp
        protected SimpleLayer CreateOutputLayer(LayerConfig outputLayerConfig, int sparseFeatureSize, int denseFeatureSize)
        {
            SimpleLayer outputLayer = null;

            switch (outputLayerConfig.LayerType)
            {
            case LayerType.SampledSoftmax:
                Logger.WriteLine("Create sampled softmax layer as output layer");
                outputLayer = new SampledSoftmaxLayer(outputLayerConfig as SampledSoftmaxLayerConfig);
                outputLayer.InitializeWeights(0, denseFeatureSize);
                break;

            case LayerType.Softmax:
                Logger.WriteLine("Create softmax layer as output layer.");
                outputLayer = new SoftmaxLayer(outputLayerConfig as SoftmaxLayerConfig);
                outputLayer.InitializeWeights(sparseFeatureSize, denseFeatureSize);
                break;

            case LayerType.Simple:
                Logger.WriteLine("Create simple layer as output layer.");
                outputLayer = new SimpleLayer(outputLayerConfig as SimpleLayerConfig);
                outputLayer.InitializeWeights(sparseFeatureSize, denseFeatureSize);
                break;
            }
            outputLayer.LabelShortList = new List<int>();

            return outputLayer;
        }
Example #5
        public void SerializationTest()
        {
            // Create a SoftmaxLayer
            var layer = new SoftmaxLayer(5);

            layer.Init(10, 10, 3);

            SoftmaxLayer deserialized;

            using (var ms = new MemoryStream())
            {
                // Serialize
                IFormatter formatter = new BinaryFormatter();
                formatter.Serialize(ms, layer);

                // Deserialize
                ms.Position  = 0;
                deserialized = formatter.Deserialize(ms) as SoftmaxLayer;
            }

            Assert.AreEqual(layer.InputDepth, deserialized.InputDepth);
            Assert.AreEqual(layer.InputHeight, deserialized.InputHeight);
            Assert.AreEqual(layer.InputWidth, deserialized.InputWidth);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
            Assert.AreEqual(layer.ClassCount, deserialized.ClassCount);
        }
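
One caveat: BinaryFormatter is marked obsolete from .NET 5 onwards and is disabled by default in .NET 8, so the pattern above only runs on older runtimes. A minimal sketch of the same round-trip idea with System.Text.Json, using a hypothetical DTO rather than the library's SoftmaxLayer type:

using System;
using System.Text.Json;

// Hypothetical DTO standing in for the layer state serialized above.
public record SoftmaxLayerDto(int InputWidth, int InputHeight, int InputDepth, int ClassCount);

public static class JsonRoundTrip
{
    public static void Main()
    {
        var layer = new SoftmaxLayerDto(10, 10, 3, 5);

        string json = JsonSerializer.Serialize(layer);
        var deserialized = JsonSerializer.Deserialize<SoftmaxLayerDto>(json);

        Console.WriteLine(layer == deserialized); // records compare by value: True
    }
}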
Example #6
        public void Test_Softmax_NullData()
        {
            DataArray    data = null;
            SoftmaxLayer soft = new SoftmaxLayer();

            // Null input; the test presumably expects SetInput to throw here.
            soft.SetInput(data);
        }
Example #7
        public void Test_Softmax_DifferentData()
        {
            Data2D       data = new Data2D(5, 4, 5, 10);
            SoftmaxLayer soft = new SoftmaxLayer();

            // Input with a shape the softmax layer does not accept; presumably expects SetInput to throw.
            soft.SetInput(data);
        }
Example #8
        public Brain(MyCaffeControl<T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase)
        {
            m_mycaffe    = mycaffe;
            m_net        = mycaffe.GetInternalNet(phase);
            m_solver     = mycaffe.GetInternalSolver();
            m_properties = properties;
            m_random     = random;

            m_memData = m_net.FindLayer(LayerParameter.LayerType.MEMORYDATA, null) as MemoryDataLayer<T>;
            m_memLoss = m_net.FindLayer(LayerParameter.LayerType.MEMORY_LOSS, null) as MemoryLossLayer<T>;
            SoftmaxLayer<T> softmax = m_net.FindLayer(LayerParameter.LayerType.SOFTMAX, null) as SoftmaxLayer<T>;

            if (softmax != null)
            {
                throw new Exception("The PG.SIMPLE trainer does not support the Softmax layer, use the 'PG.ST' or 'PG.MT' trainer instead.");
            }

            if (m_memData == null)
            {
                throw new Exception("Could not find the MemoryData Layer!");
            }

            if (m_memLoss == null)
            {
                throw new Exception("Could not find the MemoryLoss Layer!");
            }

            m_memLoss.OnGetLoss += memLoss_OnGetLoss;

            m_blobDiscountedR    = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobPolicyGradient = new Blob<T>(mycaffe.Cuda, mycaffe.Log);

            m_nMiniBatch = mycaffe.CurrentProject.GetBatchSize(phase);
        }
Example #9
        public static ILayer Load(LayerType layerType, BinaryReader br, bool forTraining = false)
        {
            ILayer layer = null;

            switch (layerType)
            {
            case LayerType.LSTM:
                layer = new LSTMLayer();
                break;

            case LayerType.DropOut:
                layer = new DropoutLayer();
                break;

            case LayerType.Softmax:
                layer = new SoftmaxLayer();
                break;

            case LayerType.SampledSoftmax:
                layer = new SampledSoftmaxLayer();
                break;

            case LayerType.Simple:
                layer = new SimpleLayer();
                break;
            }

            layer.Load(br, layerType, forTraining); // throws if layerType matched no case and layer is still null

            return layer;
        }
Example #10
        public void SoftmaxForward()
        {
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            TestForward(cpu, gpu, 400);
        }
Example #11
        public void SoftmaxLayer_BackwardGradient()
        {
            var layer = new SoftmaxLayer();
            layer.Setup(bottom, labels);

            var checker = new GradientChecker(1e-2f, 1e-2f);
            checker.CheckSingle(layer, bottom, labels, 0, -1, -1);
        }
Example #12
        public static SoftmaxLayer<T> Softmax<T>(this LayerBase<T> layer, int classCount) where T : struct, IEquatable<T>, IFormattable
        {
            var softMax = new SoftmaxLayer<T>(classCount);

            layer.ConnectTo(softMax);

            return softMax;
        }
Example #13
        public static SoftmaxLayer Softmax(this LayerBase layer, int classCount)
        {
            var softMax = new SoftmaxLayer(classCount);

            layer.ConnectTo(softMax);

            return softMax;
        }
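
Both extension methods return the newly attached layer so calls can be chained while building a network. A self-contained sketch of that connect-and-return pattern; the LayerBase, InputLayer and SoftmaxLayer types below are simplified stand-ins, not the library's:

using System;

public abstract class LayerBase
{
    public LayerBase Next { get; private set; }
    public void ConnectTo(LayerBase next) => Next = next;
}

public sealed class InputLayer : LayerBase { }

public sealed class SoftmaxLayer : LayerBase
{
    public int ClassCount { get; }
    public SoftmaxLayer(int classCount) => ClassCount = classCount;
}

public static class LayerExtensions
{
    // Same shape as Example #13: create the child, connect it, return it for chaining.
    public static SoftmaxLayer Softmax(this LayerBase layer, int classCount)
    {
        var softMax = new SoftmaxLayer(classCount);
        layer.ConnectTo(softMax);
        return softMax;
    }
}

public static class Demo
{
    public static void Main()
    {
        var softmax = new InputLayer().Softmax(classCount: 10);
        Console.WriteLine(softmax.ClassCount); // 10
    }
}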
Example #15
        private static Network CreateNewNetwork()
        {
            Network net = new Network();

            InputLayer il = new InputLayer();

            il.OutputWidth  = 32;
            il.OutputHeight = 32;
            il.OutputDepth  = 3;
            net.Layers.Add(il);

            ConvLayer conv = new ConvLayer(16, 5, 5, 3, 32, 32, 1, 2, 0, 1, 0.1);

            net.Layers.Add(conv);

            ReluLayer rl = new ReluLayer(conv.OutputDepth, conv.OutputWidth, conv.OutputHeight);

            net.Layers.Add(rl);

            MaxPoolLayer pl = new MaxPoolLayer(2, 2, rl.OutputDepth, rl.OutputWidth, rl.OutputHeight, 2, 0, 0);

            net.Layers.Add(pl);


            ConvLayer conv2 = new ConvLayer(20, 5, 5, pl.OutputDepth, pl.OutputWidth, pl.OutputHeight, 1, 2, 0, 1, 0.1);

            net.Layers.Add(conv2);

            ReluLayer rl2 = new ReluLayer(conv2.OutputDepth, conv2.OutputWidth, conv2.OutputHeight);

            net.Layers.Add(rl2);

            MaxPoolLayer pl2 = new MaxPoolLayer(2, 2, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 2, 0, 0);

            net.Layers.Add(pl2);


            ConvLayer conv3 = new ConvLayer(20, 5, 5, pl2.OutputDepth, pl2.OutputWidth, pl2.OutputHeight, 1, 2, 0, 1, 0.1);

            net.Layers.Add(conv3);

            ReluLayer rl3 = new ReluLayer(conv3.OutputDepth, conv3.OutputWidth, conv3.OutputHeight);

            net.Layers.Add(rl3);

            MaxPoolLayer pl3 = new MaxPoolLayer(2, 2, rl3.OutputDepth, rl3.OutputWidth, rl3.OutputHeight, 2, 0, 0);

            net.Layers.Add(pl3);

            FullyConnLayer fc = new FullyConnLayer(10, pl3.OutputDepth, pl3.OutputWidth, pl3.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc);

            SoftmaxLayer sl = new SoftmaxLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);

            net.LossLayer = sl;
            return net;
        }
Example #16
        public void SoftmaxForward()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 250, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 250);
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            TestForward(cpu, gpu, x);
        }
Example #17
        public void SoftmaxLayer_Setup()
        {
            var layer = new SoftmaxLayer();
            layer.Setup(bottom, top);

            Assert.Equal(bottom.Num, top.Num);
            Assert.Equal(bottom.Channels, top.Channels);
            Assert.Equal(bottom.Height, top.Height);
            Assert.Equal(bottom.Width, top.Width);
        }
Example #19
        public Brain(MyCaffeControl<T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase)
        {
            m_mycaffe    = mycaffe;
            m_net        = mycaffe.GetInternalNet(phase);
            m_solver     = mycaffe.GetInternalSolver();
            m_properties = properties;
            m_random     = random;

            m_memData = m_net.FindLayer(LayerParameter.LayerType.MEMORYDATA, null) as MemoryDataLayer<T>;
            m_memLoss = m_net.FindLayer(LayerParameter.LayerType.MEMORY_LOSS, null) as MemoryLossLayer<T>;
            m_softmax = m_net.FindLayer(LayerParameter.LayerType.SOFTMAX, null) as SoftmaxLayer<T>;

            if (m_memData == null)
            {
                throw new Exception("Could not find the MemoryData Layer!");
            }

            if (m_memLoss == null)
            {
                throw new Exception("Could not find the MemoryLoss Layer!");
            }

            m_memData.OnDataPack += memData_OnDataPack;
            m_memLoss.OnGetLoss  += memLoss_OnGetLoss;

            m_blobDiscountedR     = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobPolicyGradient  = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobActionOneHot    = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobDiscountedR1    = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobPolicyGradient1 = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobActionOneHot1   = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobLoss            = new Blob<T>(mycaffe.Cuda, mycaffe.Log);
            m_blobAprobLogit      = new Blob<T>(mycaffe.Cuda, mycaffe.Log);

            if (m_softmax != null)
            {
                LayerParameter p = new LayerParameter(LayerParameter.LayerType.SOFTMAXCROSSENTROPY_LOSS);
                p.loss_weight.Add(1);
                p.loss_weight.Add(0);
                p.loss_param.normalization = LossParameter.NormalizationMode.NONE;
                m_softmaxCe = new SoftmaxCrossEntropyLossLayer<T>(mycaffe.Cuda, mycaffe.Log, p);
            }

            m_colAccumulatedGradients = m_net.learnable_parameters.Clone();
            m_colAccumulatedGradients.SetDiff(0);

            int nMiniBatch = mycaffe.CurrentProject.GetBatchSize(phase);

            if (nMiniBatch != 0)
            {
                m_nMiniBatch = nMiniBatch;
            }

            m_nMiniBatch = m_properties.GetPropertyAsInt("MiniBatch", m_nMiniBatch);
        }
Example #21
        public SoftMaxLayerTests()
        {
            this.layer = new SoftmaxLayer(4);
            this.layer.Init(1, 1, 4);

            // Rows: small uniform logits, very large logits (a numerical-stability case), and all zeros.
            this.input = Volume.SameAs(new[]
            {
                0.1, 0.1, 0.1, 0.1,
                1000, 2000, 3000, 4000,
                0, 0, 0, 0
            }, new Shape(1, 1, 4, 3));
        }
Example #22
        public MNISTNetwork(int inputSize, int hiddenSize, int outputSize) : base()
        {
            this.inputSize  = inputSize;
            this.hiddenSize = hiddenSize;
            this.outputSize = outputSize;

            affine1 = new AffineLayer(inputSize, hiddenSize);
            relu    = new ReLULayer();
            affine2 = new AffineLayer(hiddenSize, outputSize);
            softmax = new SoftmaxLayer();

            optimizer = new MomentumOptimizer(0.9f);
        }
Example #23
        public void SoftmaxBackwardOutput()
        {
            float[,] y = new float[400, 127];
            for (int i = 0; i < 400; i++)
            {
                y[i, ThreadSafeRandom.NextInt(max: 127)] = 1;
            }
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            TestBackward(cpu, gpu, y);
        }
Example #24
            public SoftmaxLayer(SoftmaxLayer layer) : base(layer)
            {
                this.weights = new double[layer.weights.Length];
                this.biases  = new double[layer.biases.Length];

                for (int i = 0; i < layer.weights.Length; i++)
                {
                    this.weights[i] = layer.weights[i];
                }

                for (int i = 0; i < layer.biases.Length; i++)
                {
                    this.biases[i] = layer.biases[i];
                }
            }
Example #25
            public SoftmaxLayer(SoftmaxLayer sourceLayer, Layer targetLayer) : base(sourceLayer, targetLayer)
            {
                this.weights = new double[sourceLayer.weights.Length];
                this.biases  = new double[sourceLayer.biases.Length];

                for (int i = 0; i < sourceLayer.weights.Length; i++)
                {
                    this.weights[i] = sourceLayer.weights[i];
                }

                for (int i = 0; i < sourceLayer.biases.Length; i++)
                {
                    this.biases[i] = sourceLayer.biases[i];
                }
            }
Example #26
        internal static INetworkLayer CpuLayerDeserialize([NotNull] Stream stream, LayerType type)
        {
            switch (type)
            {
            case LayerType.FullyConnected: return FullyConnectedLayer.Deserialize(stream);

            case LayerType.Convolutional: return ConvolutionalLayer.Deserialize(stream);

            case LayerType.Pooling: return PoolingLayer.Deserialize(stream);

            case LayerType.Output: return OutputLayer.Deserialize(stream);

            case LayerType.Softmax: return SoftmaxLayer.Deserialize(stream);

            default: throw new ArgumentOutOfRangeException(nameof(type), $"The {type} layer type is not supported by the default deserializer");
            }
        }
Example #27
        public void SoftmaxLayer_Forward()
        {
            var layer = new SoftmaxLayer();

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int num      = bottom.Num;
                int channels = bottom.Channels;

                // Each row of the softmax output must sum to 1 (within tolerance).
                for (int i = 0; i < num; i++)
                {
                    double sum = 0;
                    for (int j = 0; j < channels; j++)
                    {
                        sum += topCpu.DataAt(i, j, 0, 0);
                    }

                    Assert.True(sum >= 0.999);
                    Assert.True(sum <= 1.001);
                }

                // Each entry must equal exp(x_ij) / sum_k exp(x_ik), within 1e-4.
                for (int i = 0; i < num; i++)
                {
                    double scale = 0;
                    for (int j = 0; j < channels; j++)
                    {
                        scale += Math.Exp(bottomCpu.DataAt(i, j, 0, 0));
                    }

                    for (int j = 0; j < channels; j++)
                    {
                        Assert.True(topCpu.DataAt(i, j, 0, 0) + 1e-4f >= Math.Exp(bottomCpu.DataAt(i, j, 0, 0)) / scale);
                        Assert.True(topCpu.DataAt(i, j, 0, 0) - 1e-4f <= Math.Exp(bottomCpu.DataAt(i, j, 0, 0)) / scale);
                    }
                }
            }
        }
Example #28
        public MNISTBatchNormalizationNetwork(int inputSize, int hiddenSize, int outputSize) : base()
        {
            this.inputSize  = inputSize;
            this.hiddenSize = hiddenSize;
            this.outputSize = outputSize;

            // Mathf.Sqrt(2 / fan-in) is He initialization, the usual scaling for ReLU layers.
            affine1 = new AffineLayer(inputSize, hiddenSize, Mathf.Sqrt(2.0f / inputSize));
            bn1     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu1   = new ReLULayer();

            affine2 = new AffineLayer(hiddenSize, hiddenSize, Mathf.Sqrt(2.0f / hiddenSize));
            bn2     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu2   = new ReLULayer();

            affine3 = new AffineLayer(hiddenSize, outputSize, Mathf.Sqrt(2.0f / hiddenSize));
            softmax = new SoftmaxLayer();

            optimizer = new MomentumOptimizer(0.9f);
        }
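
Both MNIST networks pass 0.9f to MomentumOptimizer, which by convention is the momentum coefficient. A library-independent sketch of the classical momentum update (the class and member names below are assumptions, not the optimizer above):

using System;

// Classical SGD with momentum: v <- mu * v - lr * grad; w <- w + v.
sealed class MomentumSketch
{
    private readonly float mu;  // momentum coefficient, e.g. 0.9f as in the examples
    private readonly float lr;  // learning rate
    private float[] velocity;

    public MomentumSketch(float mu, float lr) { this.mu = mu; this.lr = lr; }

    public void Step(float[] weights, float[] grads)
    {
        velocity ??= new float[weights.Length];
        for (int i = 0; i < weights.Length; i++)
        {
            velocity[i] = mu * velocity[i] - lr * grads[i];
            weights[i] += velocity[i];
        }
    }

    static void Main()
    {
        var opt = new MomentumSketch(mu: 0.9f, lr: 0.1f);
        float[] w = { 1f }, g = { 2f };
        opt.Step(w, g);
        Console.WriteLine(w[0]); // 1 - 0.1 * 2 = 0.8
    }
}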
Example #29
File: SandBox.cs Project: shchy/dl
        public void Run()
        {
            // 入力レイヤ
            var inputLayer = new InputLayer(3);
            // Hidden layer
            var layer00 = new FullyConnectedLayer(inputLayer, 20, DLF.ReLU, DLF.UpdateWeight(), DLF.GetRandomWeight);
            // Hidden layer
            var layer01 = new FullyConnectedLayer(layer00, 10, DLF.ReLU, DLF.UpdateWeight(), DLF.GetRandomWeight);
            // Output layer
            var layer02 = new SoftmaxLayer(layer01, 3);

            var batchSize    = 8;
            var epoch        = 1000;
            var learningRate = 0.01;
            Func <IEnumerable <Tuple <double, double> >, double> errorFunction = DLF.ErrorFunctionCrossEntropy;

            var machine = new Machine(learningRate, epoch, batchSize, new Validator(3)
                                      , x => errorFunction(x) * (1.0 / batchSize)
                                      , inputLayer
                                      , layer00
                                      , layer01
                                      , layer02);
            // Generate training data
            var testData = DLF.Shuffle(
                from x in Enumerable.Range(1, 8)
                from y in Enumerable.Range(1, 8)
                from z in Enumerable.Range(1, 8)
                let v = x + (y * 2) + z
                let expect = v < 15 ? new[] { 1.0, 0.0, 0.0 }
                           : v < 20 ? new[] { 0.0, 1.0, 0.0 }
                           : new[] { 0.0, 0.0, 1.0 }
                select LearningData.New(expect.ToString(), new double[] { x, y, z }, expect))
                           .ToArray();

            var validData = testData.Skip(testData.Length / 2).ToArray();

            testData = testData.Take(testData.Length / 2).ToArray();

            machine.Learn(testData.ToArray(), validData.ToArray());
        }
Example #30
File: RNN.cs Project: deTrident/RNNSharp
        public static SimpleLayer Load(LayerType layerType, BinaryReader br)
        {
            switch (layerType)
            {
            case LayerType.LSTM:
                return LSTMLayer.Load(br, LayerType.LSTM);

            case LayerType.DropOut:
                return DropoutLayer.Load(br, LayerType.DropOut);

            case LayerType.Softmax:
                return SoftmaxLayer.Load(br, LayerType.Softmax);

            case LayerType.SampledSoftmax:
                return SampledSoftmaxLayer.Load(br, LayerType.SampledSoftmax);

            case LayerType.Simple:
                return SimpleLayer.Load(br, LayerType.Simple);
            }

            return null;
        }
Example #32
        public override void InitializeNetwork()
        {
            //
            m_Layers = new Layer[m_Parameters.m_NumHiddenLayers + 1];

            //create neurons in hidden layers
            for (int i = 0; i < m_Parameters.m_NumHiddenLayers; i++)
            {
                if (i == 0)
                {
                    m_Layers[i] = new ANNLayer(m_InputCount, m_Parameters.m_NeuronsInHiddenLayer, m_Parameters.m_ActFunction);
                }
                else
                {
                    m_Layers[i] = new ANNLayer(m_Parameters.m_NeuronsInHiddenLayer, m_Parameters.m_NeuronsInHiddenLayer, m_Parameters.m_ActFunction);
                }
            }

            //create neurons and error array for the last layer, which is usually 1
            Layer ly = new SoftmaxLayer(m_Parameters.m_NeuronsInHiddenLayer, m_OutputCount, new Linear());

            m_Layers[m_Parameters.m_NumHiddenLayers] = ly;
        }
Example #33
        public void SoftmaxLayerSerialization()
        {
            var layer = new SoftmaxLayer(10);

            layer.Init(28, 24, 1);
            var data = layer.GetData();

            Assert.AreEqual(28, data["InputWidth"]);
            Assert.AreEqual(24, data["InputHeight"]);
            Assert.AreEqual(1, data["InputDepth"]);

            var deserialized = LayerBase<double>.FromData(data) as SoftmaxLayer;

            Assert.IsNotNull(deserialized);
            Assert.AreEqual(28, deserialized.InputWidth);
            Assert.AreEqual(24, deserialized.InputHeight);
            Assert.AreEqual(1, deserialized.InputDepth);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);

            Assert.AreEqual(layer.ClassCount, deserialized.ClassCount);
        }
Example #34
        public void Test_Softmax_Execute()
        {
            // Softmax output
            softmax = new SoftmaxLayer();
            Data2D data = new Data2D(1, 1, 5, 1);

            data[0, 0, 0, 0] = 0.0;
            data[0, 0, 1, 0] = 1.0;
            data[0, 0, 2, 0] = 1.5;
            data[0, 0, 3, 0] = 2.0;
            data[0, 0, 4, 0] = 3.0;

            softmax.SetInput(data);
            softmax.Execute();

            Data2D output = softmax.GetOutput() as Data2D;

            // Expected output
            double[] expOu = new double[5];

            double sum = 0.0;

            sum += (Math.Exp(0.0) + Math.Exp(1.0) + Math.Exp(1.5) + Math.Exp(2.0) + Math.Exp(3.0));

            expOu[0] = Math.Exp(0.0) / sum;
            expOu[1] = Math.Exp(1.0) / sum;
            expOu[2] = Math.Exp(1.5) / sum;
            expOu[3] = Math.Exp(2.0) / sum;
            expOu[4] = Math.Exp(3.0) / sum;

            Assert.AreEqual(output[0, 0, 0, 0], expOu[0], 0.00000001);
            Assert.AreEqual(output[0, 0, 1, 0], expOu[1], 0.00000001);
            Assert.AreEqual(output[0, 0, 2, 0], expOu[2], 0.00000001);
            Assert.AreEqual(output[0, 0, 3, 0], expOu[3], 0.00000001);
            Assert.AreEqual(output[0, 0, 4, 0], expOu[4], 0.00000001);
        }