public unsafe void SoftmaxBackwardOutput()
        {
            float[,]
            x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 250, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 250),
            y = new float[400, 127];
            for (int i = 0; i < 400; i++)
            {
                y[i, ThreadSafeRandom.NextInt(max: 127)] = 1;
            }
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            fixed (float* px = x, py = y)
            {
                Tensor.Reshape(px, x.GetLength(0), x.GetLength(1), out Tensor xt);
                cpu.Forward(xt, out Tensor z, out Tensor a);
                a.Duplicate(out Tensor a2);
                Tensor.Reshape(py, y.GetLength(0), y.GetLength(1), out Tensor yt);
                cpu.Backpropagate(a, yt, z);
                gpu.Backpropagate(a2, yt, z);
                Assert.IsTrue(a.ContentEquals(a2));
                a.Free();
                a2.Free();
                z.Free();
            }
        }
Example #2
        public static async Task Main()
        {
            // Create the network
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image<Alpha8>(28, 28),
                                                                  NetworkLayers.Convolutional((5, 5), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  NetworkLayers.FullyConnected(100, ActivationType.LeCunTanh),
                                                                  NetworkLayers.Softmax(10));

            // Prepare the dataset
            ITrainingDataset trainingData = await Mnist.GetTrainingDatasetAsync(100); // Batches of 100 samples

            ITestDataset testData = await Mnist.GetTestDatasetAsync(p => Printf($"Epoch {p.Iteration}, cost: {p.Result.Cost}, accuracy: {p.Result.Accuracy}"));

            if (trainingData == null || testData == null)
            {
                Printf("Error downloading the datasets");
                Console.ReadKey();
                return;
            }

            // Train the network
            TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network,
                                                                                  trainingData,
                                                                                  TrainingAlgorithms.AdaDelta(),
                                                                                  60, 0.5f,
                                                                                  TrackBatchProgress,
                                                                                  testDataset: testData);

            Printf($"Stop reason: {result.StopReason}, elapsed time: {result.TrainingTime}");
            Console.ReadKey();
        }
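
The sample above relies on two helpers, Printf and TrackBatchProgress, that are not shown on this page. A minimal sketch of what they might look like, assuming Printf is a timestamped Console wrapper and that the progress callback simply reports a completion percentage (both bodies are assumptions, not the actual implementation):

        // Hypothetical helpers assumed by the sample above
        private static void Printf(string text) => Console.WriteLine($"[{DateTime.Now:HH:mm:ss}] {text}");

        private static void TrackBatchProgress(BatchProgress progress)
        {
            // Assumption: the callback argument exposes the completion percentage of the current batches
            Console.Write($"\rBatch progress: {progress.Percentage}%");
        }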
Example #3
        private void CrearNeuralNetwork()
        {
            int neuronasOcultas  = (int)spinNeuronasOculta.Value;
            var activacionOculta = cboFuncionActivacionOculta.SelectedItem as EnumInfo<ActivationType>;
            var pesosOculta      = cboPesosOculta.SelectedItem as EnumInfo<WeightsInitializationMode>;
            var biasOculta       = cboBiasOculta.SelectedItem as EnumInfo<BiasInitializationMode>;

            var activacionSalida = cboFuncionActivacionSalida.SelectedItem as EnumInfo<ActivationType>;
            var funcionCosto     = cboFuncionCosto.SelectedItem as EnumInfo<CostFunctionType>;
            var pesosSalida      = cboPesosSalida.SelectedItem as EnumInfo<WeightsInitializationMode>;
            var biasSalida       = cboBiasSalida.SelectedItem as EnumInfo<BiasInitializationMode>;

            LayerFactory layerSalida;

            if (activacionSalida.Valor == ActivationType.Softmax)
            {
                layerSalida = NetworkLayers.Softmax(3, pesosSalida.Valor, biasSalida.Valor);
            }
            else
            {
                layerSalida = NetworkLayers.FullyConnected(3, activacionSalida.Valor, funcionCosto.Valor, pesosSalida.Valor, biasSalida.Valor);
            }

            _neuralNetwork = NetworkManager.NewSequential(TensorInfo.Linear(4),
                                                          NetworkLayers.FullyConnected(neuronasOcultas, activacionOculta.Valor, pesosOculta.Valor, biasOculta.Valor),
                                                          layerSalida);
        }
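
EnumInfo<T> is a helper from this WinForms sample rather than a library type; judging from the .Valor accesses above, it pairs an enum value with display text for the combo boxes. A minimal sketch under that assumption (the second property and the constructor shape are guesses):

        // Hypothetical combo-box item wrapper assumed by the snippet above
        public sealed class EnumInfo<T> where T : struct, Enum
        {
            public T Valor { get; }             // wrapped enum value
            public string Descripcion { get; }  // display text

            public EnumInfo(T valor, string descripcion) => (Valor, Descripcion) = (valor, descripcion);

            public override string ToString() => Descripcion;
        }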
Example #4
 public unsafe void Pool2()
 {
     // Test values
     float[,]
     m =
     {
         {
             0.77f, -0.11f, 0.11f, 0.33f, 0.55f, -0.11f, 0.33f,
             -0.11f, 1, -0.11f, 0.33f, -0.11f, 0.11f, -0.11f,
             0.11f, -0.11f, 1, -0.33f, 0.11f, -0.11f, 0.55f,
             0.33f, 0.33f, -0.33f, 0.55f, -0.33f, 0.33f, 0.33f,
             0.55f, -0.11f, 0.11f, -0.33f, 1, -0.11f, 0.11f,
             -0.11f, 0.11f, -0.11f, 0.33f, -0.11f, 1, -0.11f,
             0.33f, -0.11f, 0.55f, 0.33f, 0.11f, -0.11f, 0.77f
         }
     },
     r =
     {
         {
             1, 0.33f, 0.55f, 0.33f,
             0.33f, 1, 0.33f, 0.55f,
             0.55f, 0.33f, 1, 0.11f,
             0.33f, 0.55f, 0.11f, 0.77f
         }
     };
     fixed (float* pm = m)
     {
         Tensor.Reshape(pm, 1, 49, out Tensor mTensor);
         Tensor.New(1, 16, out Tensor result);
         CpuDnn.PoolingForward(mTensor, TensorInfo.Image<Alpha8>(7, 7), result);
         Assert.IsTrue(result.ToArray2D().ContentEquals(r));
         result.Free();
     }
 }
        public unsafe void FullyConnectedForward()
        {
            FullyConnectedLayer fc = new FullyConnectedLayer(TensorInfo.Linear(231), 125, ActivationType.Sigmoid, WeightsInitializationMode.GlorotUniform, BiasInitializationMode.Gaussian);
            Tensor x = CreateRandomTensor(400, fc.InputInfo.Size);

            fixed (float* pw = fc.Weights, pb = fc.Biases)
            {
                Tensor.Reshape(pw, fc.InputInfo.Size, fc.OutputInfo.Size, out Tensor w);
                Tensor.Reshape(pb, 1, fc.OutputInfo.Size, out Tensor b);
                Tensor.New(x.Entities, fc.OutputInfo.Size, out Tensor y1);
                CpuDnn.FullyConnectedForward(x, w, b, y1);
                Gpu gpu = Gpu.Default;

                using (DeviceMemory<float>
                       x_gpu = gpu.AllocateDevice(x),
                       w_gpu = gpu.AllocateDevice(w),
                       b_gpu = gpu.AllocateDevice(b),
                       y_gpu = gpu.AllocateDevice<float>(y1.Size))
                {
                    Dnn.Get(gpu).FullyConnectedForward(x.Entities, x.Length, y1.Length, x_gpu.Ptr, w_gpu.Ptr, b_gpu.Ptr, y_gpu.Ptr);
                    y_gpu.CopyToHost(y1.Entities, y1.Length, out Tensor y2);
                    Assert.IsTrue(y1.ContentEquals(y2));
                    Tensor.Free(x, y1, y2);
                }
            }
        }
        public unsafe void FullyConnectedBackwardData()
        {
            FullyConnectedLayer fc = new FullyConnectedLayer(TensorInfo.Linear(231), 125, ActivationType.Sigmoid, WeightsInitializationMode.GlorotUniform, BiasInitializationMode.Gaussian);
            Tensor dy = CreateRandomTensor(400, fc.OutputInfo.Size);

            fixed (float* pw = fc.Weights, pb = fc.Biases)
            {
                Tensor.Reshape(pw, fc.InputInfo.Size, fc.OutputInfo.Size, out Tensor w);
                Tensor.Reshape(pb, 1, fc.OutputInfo.Size, out Tensor b);
                Tensor.New(dy.Entities, fc.InputInfo.Size, out Tensor dx1);
                CpuDnn.FullyConnectedBackwardData(w, dy, dx1);
                Gpu gpu = Gpu.Default;

                using (DeviceMemory<float>
                       dy_gpu = gpu.AllocateDevice(dy),
                       w_gpu = gpu.AllocateDevice(w),
                       dx_gpu = gpu.AllocateDevice<float>(dx1.Size))
                {
                    Dnn.Get(gpu).FullyConnectedBackwardData(dy.Entities, fc.InputInfo.Size, fc.OutputInfo.Size, dy_gpu.Ptr, w_gpu.Ptr, dx_gpu.Ptr);
                    dx_gpu.CopyToHost(dx1.Entities, dx1.Length, out Tensor dx2);
                    Assert.IsTrue(dx1.ContentEquals(dx2));
                    Tensor.Free(dy, dx1, dx2);
                }
            }
        }
        public void SoftmaxForward()
        {
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            TestForward(cpu, gpu, 400);
        }
        public void FullyConnectedBackward()
        {
            FullyConnectedLayer
                cpu = new FullyConnectedLayer(TensorInfo.Linear(250), 127, ActivationType.LeCunTanh, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnFullyConnectedLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases, cpu.ActivationType);

            TestBackward(cpu, gpu, 400);
        }
        public void SpatialBatchNormalizationBackward()
        {
            BatchNormalizationLayerBase
                cpu = new BatchNormalizationLayer(TensorInfo.Volume(12, 12, 13), NormalizationMode.Spatial, ActivationType.ReLU),
                gpu = new CuDnnBatchNormalizationLayer(cpu.InputInfo, NormalizationMode.Spatial, cpu.Weights, cpu.Biases, cpu.Iteration, cpu.Mu.AsSpan().ToArray(), cpu.Sigma2.AsSpan().ToArray(), cpu.ActivationType);

            TestBackward(cpu, gpu, 400);
        }
        public void PerActivationBatchNormalizationBackward()
        {
            BatchNormalizationLayerBase
                cpu = new BatchNormalizationLayer(TensorInfo.Linear(250), NormalizationMode.PerActivation, ActivationType.ReLU),
                gpu = new CuDnnBatchNormalizationLayer(cpu.InputInfo, NormalizationMode.PerActivation, cpu.Weights, cpu.Biases, cpu.Iteration, cpu.Mu.AsSpan().ToArray(), cpu.Sigma2.AsSpan().ToArray(), cpu.ActivationType);

            TestBackward(cpu, gpu, 400);
        }
        public void SoftmaxForward()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 250, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 250);
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            TestForward(cpu, gpu, x);
        }
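
The TestForward and TestBackward helpers called by these tests are not included on this page. A minimal sketch of what the float[,] overload of TestForward could look like, reconstructed only from how it is used here (the signature and the shared base type are assumptions):

        // Hypothetical reconstruction: run the same input through a CPU and a GPU layer and compare results
        private static unsafe void TestForward(NetworkLayerBase cpu, NetworkLayerBase gpu, float[,] x)
        {
            fixed (float* px = x)
            {
                Tensor.Reshape(px, x.GetLength(0), x.GetLength(1), out Tensor xt);
                cpu.Forward(xt, out Tensor zCpu, out Tensor aCpu);
                gpu.Forward(xt, out Tensor zGpu, out Tensor aGpu);
                Assert.IsTrue(zCpu.ContentEquals(zGpu));
                Assert.IsTrue(aCpu.ContentEquals(aGpu));
                Tensor.Free(zCpu, aCpu, zGpu, aGpu);
            }
        }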
Example #12
        public static INeuralNetwork NewGraph(TensorInfo input, [NotNull] Action<NodeBuilder> builder)
        {
            NodeBuilder root = NodeBuilder.Input();

            builder(root);
            ComputationGraph graph = ComputationGraph.New(input, root);

            return new ComputationGraphNetwork(graph);
        }
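
For reference, a minimal NewGraph call mirroring the graph construction used elsewhere on this page (a single-branch graph equivalent to a small sequential network):

        // Input -> fully connected -> softmax, built through the NodeBuilder callback
        INeuralNetwork network = NetworkManager.NewGraph(TensorInfo.Linear(784), root =>
        {
            var fc = root.Layer(NetworkLayers.FullyConnected(100, ActivationType.Sigmoid));
            fc.Layer(NetworkLayers.Softmax(10));
        });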
        public void FullyConnectedForward()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 250, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 250);
            FullyConnectedLayer
                cpu = new FullyConnectedLayer(TensorInfo.Linear(250), 127, ActivationFunctionType.LeCunTanh, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnFullyConnectedLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases, cpu.ActivationFunctionType);

            TestForward(cpu, gpu, x);
        }
        public void PoolingForward()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 58 * 58 * 3, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 58 * 58 * 3);
            PoolingLayer
                cpu = new PoolingLayer(new TensorInfo(58, 58, 3), PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
                gpu = new CuDnnPoolingLayer(cpu.InputInfo, PoolingInfo.Default, ActivationFunctionType.LeakyReLU);

            TestForward(cpu, gpu, x);
        }
        public void ConvolutionForward()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(127), 58 * 58 * 3, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(127, 58 * 58 * 3);
            ConvolutionalLayer
                cpu = new ConvolutionalLayer(new TensorInfo(58, 58, 3), ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.LeakyReLU, BiasInitializationMode.Gaussian),
                gpu = new CuDnnConvolutionalLayer(cpu.InputInfo, ConvolutionInfo.Default, cpu.KernelInfo, cpu.OutputInfo, cpu.Weights, cpu.Biases, cpu.ActivationFunctionType);

            TestForward(cpu, gpu, x);
        }
        public unsafe void InceptionPoolPipeline()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(10), 12 * 12 * 3, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(10, 12 * 12 * 3);
            CuDnnPoolingLayer       pool      = new CuDnnPoolingLayer(TensorInfo.Image<Rgb24>(12, 12), PoolingInfo.New(PoolingMode.Max, 3, 3, 1, 1, 1, 1), ActivationType.ReLU);
            CuDnnConvolutionalLayer conv      = new CuDnnConvolutionalLayer(pool.OutputInfo, ConvolutionInfo.New(ConvolutionMode.CrossCorrelation), (1, 1), 10, ActivationType.ReLU, BiasInitializationMode.Gaussian);
            CuDnnInceptionLayer     inception = new CuDnnInceptionLayer(TensorInfo.Image<Rgb24>(12, 12), InceptionInfo.New(3, 2, 2, 2, 2, PoolingMode.Max, 10));

            fixed (float* pw = inception.Weights)
            Unsafe.InitBlock(pw, 0, (uint)(sizeof(float) * inception.Weights.Length));

            Buffer.BlockCopy(conv.Weights, 0, inception.Weights, sizeof(float) * (3 * 3 + 3 * 2 + 3 * 3 * 2 * 2 + 3 * 2 + 5 * 5 * 2 * 2), sizeof(float) * conv.Weights.Length);
            Buffer.BlockCopy(conv.Biases, 0, inception.Biases, sizeof(float) * (3 + 2 + 2 + 2 + 2), sizeof(float) * conv.Biases.Length);
            fixed (float* px = x)
            {
                // Forward + Z
                Tensor.Reshape(px, x.GetLength(0), x.GetLength(1), out Tensor xTensor);
                pool.Forward(xTensor, out Tensor zTemp, out Tensor aTemp);
                conv.Forward(aTemp, out Tensor zConv, out Tensor aConv);
                inception.Forward(xTensor, out Tensor zInc, out Tensor aInc);
                Tensor.New(zConv.Entities, zConv.Length, out Tensor reshaped);
                float* pzInc = (float*)zInc.Ptr.ToPointer() + 12 * 12 * (3 + 2 + 2), preshaped = (float*)reshaped.Ptr.ToPointer();

                for (int i = 0; i < zConv.Entities; i++)
                {
                    Buffer.MemoryCopy(pzInc + i * zInc.Length, preshaped + i * zConv.Length, sizeof(float) * zConv.Length, sizeof(float) * zConv.Length);
                }
                Assert.IsTrue(reshaped.ContentEquals(zConv));

                // A
                float* paInc = (float*)aInc.Ptr.ToPointer() + 12 * 12 * (3 + 2 + 2);

                for (int i = 0; i < aConv.Entities; i++)
                {
                    Buffer.MemoryCopy(paInc + i * aInc.Length, preshaped + i * aConv.Length, sizeof(float) * aConv.Length, sizeof(float) * aConv.Length);
                }
                Assert.IsTrue(reshaped.ContentEquals(aConv));

                // Backpropagation
                Tensor.Like(aTemp, out Tensor convdx);
                Tensor.Like(xTensor, out Tensor pooldx);
                Tensor.Like(xTensor, out Tensor incdx);
                conv.Backpropagate(aTemp, zConv, aConv, convdx, out Tensor convdJdw, out Tensor convdJdb);
                pool.Backpropagate(xTensor, zTemp, convdx, pooldx);
                inception.Backpropagate(xTensor, zInc, aInc, incdx, out Tensor incdJdw, out Tensor incdJdb);
                Assert.IsTrue(incdx.ContentEquals(pooldx));

                // Gradient
                Tensor.Reshape((float*)incdJdw.Ptr.ToPointer() + (3 * 3 + 3 * 2 + 3 * 3 * 2 * 2 + 3 * 2 + 5 * 5 * 2 * 2), 1, convdJdw.Size, out Tensor dJdwInc0);
                Tensor.Reshape((float*)incdJdb.Ptr.ToPointer() + 11, 1, convdJdb.Size, out Tensor dJdbInc0);
                Assert.IsTrue(convdJdw.ContentEquals(dJdwInc0, 1e-5f));
                Assert.IsTrue(convdJdb.ContentEquals(dJdbInc0, 1e-5f));

                // Cleanup
                Tensor.Free(zTemp, aTemp, zConv, aConv, zInc, aInc, reshaped, convdx, pooldx, incdx, convdJdw, convdJdb, incdJdw, incdJdb);
            }
        }
Example #17
 public static INeuralNetwork NewSequential(TensorInfo input, [NotNull, ItemNotNull] params LayerFactory[] factories)
 {
     // Fold the layer factories into concrete layers, threading each layer's output shape into the next factory
     return new SequentialNetwork(factories.Aggregate(new List<INetworkLayer>(), (l, f) =>
     {
         INetworkLayer layer = f(input);
         input = layer.OutputInfo;
         l.Add(layer);
         return l;
     }).ToArray());
 }
        public void ConvolutionInfoFactory()
        {
            ConvolutionInfo info = ConvolutionInfo.Same()(TensorInfo.Image<Alpha8>(28, 28), (3, 3));

            Assert.IsTrue(info.VerticalPadding == 1 && info.HorizontalPadding == 1);
            info = ConvolutionInfo.Same()(TensorInfo.Image<Alpha8>(28, 28), (5, 5));
            Assert.IsTrue(info.VerticalPadding == 2 && info.HorizontalPadding == 2);
            info = ConvolutionInfo.Same(ConvolutionMode.Convolution, 2, 2)(TensorInfo.Image<Alpha8>(10, 10), (3, 3));
            Assert.IsTrue(info.VerticalPadding == 6 && info.HorizontalPadding == 6);
        }
 public void Initialization1()
 {
     INeuralNetwork network = NetworkManager.NewGraph(TensorInfo.Image<Alpha8>(60, 60), root =>
     {
         var conv1 = root.Layer(NetworkLayers.Convolutional((5, 5), 10, ActivationType.Identity));
         var pool1 = conv1.Layer(NetworkLayers.Pooling(ActivationType.LeakyReLU));
         var conv2 = pool1.Layer(NetworkLayers.Convolutional((3, 3), 10, ActivationType.Identity));
         var pool2 = conv2.Layer(NetworkLayers.Pooling(ActivationType.ReLU));
         var fc    = pool2.Layer(NetworkLayers.FullyConnected(64, ActivationType.LeCunTanh));
         _         = fc.Layer(NetworkLayers.Softmax(10));
     });
 }
 public void Init()
 {
     INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image<Alpha8>(20, 20),
                                                           CuDnnNetworkLayers.Convolutional((5, 5), 20, ActivationType.Identity),
                                                           CuDnnNetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                           CuDnnNetworkLayers.Convolutional((3, 3), 40, ActivationType.LeakyReLU),
                                                           CuDnnNetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                           CuDnnNetworkLayers.FullyConnected(125, ActivationType.LeakyReLU),
                                                           CuDnnNetworkLayers.FullyConnected(64, ActivationType.LeakyReLU),
                                                           CuDnnNetworkLayers.Softmax(10));
 }
 private static unsafe Tensor CreateRandomTensor(int entities, int length)
 {
     float[] v = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(entities), length, WeightsInitializationMode.GlorotNormal);
     Tensor.New(entities, length, out Tensor tensor);
     fixed (float* pv = v)
     {
         Tensor.Reshape(pv, entities, length, out Tensor source);
         tensor.Overwrite(source);
         return tensor;
     }
 }
 public void StreamSerialize()
 {
     using (MemoryStream stream = new MemoryStream())
     {
         float[] w = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(784), 30, WeightsInitializationMode.GlorotNormal);
         stream.WriteShuffled(w);
         Assert.IsTrue(stream.Position == sizeof(float) * w.Length);
         stream.Seek(0, SeekOrigin.Begin);
         float[] t = stream.ReadUnshuffled(w.Length);
         Assert.IsTrue(w.ContentEquals(t));
     }
 }
        public void SoftmaxBackwardOutput()
        {
            float[,] y = new float[400, 127];
            for (int i = 0; i < 400; i++)
            {
                y[i, ThreadSafeRandom.NextInt(max: 127)] = 1;
            }
            OutputLayerBase
                cpu = new SoftmaxLayer(TensorInfo.Linear(250), 127, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnSoftmaxLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases);

            TestBackward(cpu, gpu, y);
        }
        public unsafe void Inception1x1()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(10), 32 * 32 * 3, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(10, 32 * 32 * 3);
            CuDnnConvolutionalLayer conv      = new CuDnnConvolutionalLayer(TensorInfo.Image<Rgb24>(32, 32), ConvolutionInfo.New(ConvolutionMode.CrossCorrelation), (1, 1), 10, ActivationType.ReLU, BiasInitializationMode.Gaussian);
            CuDnnInceptionLayer     inception = new CuDnnInceptionLayer(conv.InputInfo, InceptionInfo.New(10, 10, 10, 10, 10, PoolingMode.Max, 10));

            fixed (float* pw = inception.Weights)
            Unsafe.InitBlock(pw, 0, (uint)(sizeof(float) * inception.Weights.Length));

            Buffer.BlockCopy(conv.Weights, 0, inception.Weights, 0, sizeof(float) * conv.Weights.Length);
            Buffer.BlockCopy(conv.Biases, 0, inception.Biases, 0, sizeof(float) * conv.Biases.Length);
            fixed (float* px = x)
            {
                // Forward + Z
                Tensor.Reshape(px, x.GetLength(0), x.GetLength(1), out Tensor xTensor);
                conv.Forward(xTensor, out Tensor zConv, out Tensor aConv);
                inception.Forward(xTensor, out Tensor zInc, out Tensor aInc);
                Tensor.New(zConv.Entities, zConv.Length, out Tensor reshaped);
                float* pzInc = (float*)zInc.Ptr.ToPointer(), preshaped = (float*)reshaped.Ptr.ToPointer();

                for (int i = 0; i < zConv.Entities; i++)
                {
                    Buffer.MemoryCopy(pzInc + i * zInc.Length, preshaped + i * zConv.Length, sizeof(float) * zConv.Length, sizeof(float) * zConv.Length);
                }
                Assert.IsTrue(reshaped.ContentEquals(zConv));

                // A
                float* paInc = (float*)aInc.Ptr.ToPointer();

                for (int i = 0; i < aConv.Entities; i++)
                {
                    Buffer.MemoryCopy(paInc + i * aInc.Length, preshaped + i * aConv.Length, sizeof(float) * aConv.Length, sizeof(float) * aConv.Length);
                }
                Assert.IsTrue(reshaped.ContentEquals(aConv));

                // Backpropagate
                Tensor.Like(xTensor, out Tensor dx1);
                Tensor.Like(xTensor, out Tensor dx2);
                conv.Backpropagate(xTensor, zConv, aConv, dx1, out Tensor dJdw1, out Tensor dJdb1);
                inception.Backpropagate(xTensor, zInc, aInc, dx2, out Tensor dJdw2, out Tensor dJdb2);
                Assert.IsTrue(dx1.ContentEquals(dx2));
                Tensor.Reshape((float*)dJdw2.Ptr.ToPointer(), 1, dJdw1.Size, out dJdw2);
                Tensor.Reshape((float*)dJdb2.Ptr.ToPointer(), 1, dJdb1.Size, out dJdb2);
                Assert.IsTrue(dJdw1.ContentEquals(dJdw2, 1e-5f));
                Assert.IsTrue(dJdb1.ContentEquals(dJdb2, 1e-5f));

                // Cleanup
                Tensor.Free(zConv, aConv, zInc, aInc, reshaped, dx1, dx2, dJdw1, dJdw2, dJdb1, dJdb2);
            }
        }
        public static async Task Main()
        {
            // Create the network
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image<Alpha8>(28, 28),
                                                                  CuDnnNetworkLayers.Convolutional((5, 5), 20, ActivationType.Identity),
                                                                  CuDnnNetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  CuDnnNetworkLayers.Convolutional((3, 3), 40, ActivationType.Identity),
                                                                  CuDnnNetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  CuDnnNetworkLayers.FullyConnected(125, ActivationType.LeCunTanh),
                                                                  CuDnnNetworkLayers.Softmax(10));

            // Prepare the dataset
            ITrainingDataset trainingData = await Mnist.GetTrainingDatasetAsync(400); // Batches of 400 samples

            ITestDataset testData = await Mnist.GetTestDatasetAsync(p => Printf($"Epoch {p.Iteration}, cost: {p.Result.Cost}, accuracy: {p.Result.Accuracy}"));

            if (trainingData == null || testData == null)
            {
                Printf("Error downloading the datasets");
                Console.ReadKey();
                return;
            }

            // Setup and network training
            CancellationTokenSource cts = new CancellationTokenSource();

            Console.CancelKeyPress += (s, e) => cts.Cancel();
            TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network,
                                                                                  trainingData,
                                                                                  TrainingAlgorithms.AdaDelta(),
                                                                                  20, 0.5f,
                                                                                  TrackBatchProgress,
                                                                                  testDataset: testData, token: cts.Token);

            // Save the training reports
            string
                timestamp = DateTime.Now.ToString("yy-MM-dd-hh-mm-ss"),
                path      = Path.GetDirectoryName(Path.GetFullPath(Assembly.GetExecutingAssembly().Location)),
                dir       = Path.Combine(path ?? throw new InvalidOperationException("The dll path can't be null"), "TrainingResults", timestamp);

            Directory.CreateDirectory(dir);
            File.WriteAllText(Path.Combine(dir, $"{timestamp}_cost.py"), result.TestReports.AsPythonMatplotlibChart(TrainingReportType.Cost));
            File.WriteAllText(Path.Combine(dir, $"{timestamp}_accuracy.py"), result.TestReports.AsPythonMatplotlibChart(TrainingReportType.Accuracy));
            network.Save(new FileInfo(Path.Combine(dir, $"{timestamp}{NetworkLoader.NetworkFileExtension}")));
            File.WriteAllText(Path.Combine(dir, $"{timestamp}.json"), network.SerializeMetadataAsJson());
            File.WriteAllText(Path.Combine(dir, $"{timestamp}_report.json"), result.SerializeAsJson());
            Printf($"Stop reason: {result.StopReason}, elapsed time: {result.TrainingTime}");
            Console.ReadKey();
        }
        public void ForwardTest1()
        {
            INeuralNetwork cpu = NetworkManager.NewGraph(TensorInfo.Image<Alpha8>(28, 28), root =>
            {
                var fc1 = root.Layer(NetworkLayers.FullyConnected(100, ActivationType.Sigmoid));
                fc1.Layer(NetworkLayers.Softmax(10));
            });
            INeuralNetwork gpu = NetworkManager.NewGraph(TensorInfo.Image<Alpha8>(28, 28), root =>
            {
                var fc1l = cpu.Layers[0].To<INetworkLayer, FullyConnectedLayer>();
                var fc1  = root.Layer(_ => new CuDnnFullyConnectedLayer(fc1l.InputInfo, 100, fc1l.Weights, fc1l.Biases, fc1l.ActivationType));
                var sm1l = cpu.Layers[1].To<INetworkLayer, SoftmaxLayer>();
                fc1.Layer(_ => new CuDnnSoftmaxLayer(sm1l.InputInfo, sm1l.OutputInfo.Size, sm1l.Weights, sm1l.Biases));
            });

            ForwardTest(cpu, gpu);
        }
        public unsafe void ConvolutionGradient()
        {
            float[,]
            x     = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(127), 58 * 58 * 3, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(127, 58 * 58 * 3),
            delta = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(127), 54 * 54 * 5, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(127, 54 * 54 * 5);
            ConvolutionalLayer
                cpu = new ConvolutionalLayer(new TensorInfo(58, 58, 3), ConvolutionInfo.Default, (5, 5), 5, ActivationFunctionType.LeCunTanh, BiasInitializationMode.Gaussian),
                gpu = new CuDnnConvolutionalLayer(cpu.InputInfo, ConvolutionInfo.Default, cpu.KernelInfo, cpu.OutputInfo, cpu.Weights, cpu.Biases, ActivationFunctionType.LeCunTanh);

            fixed (float* px = x)
            {
                Tensor.Reshape(px, x.GetLength(0), x.GetLength(1), out Tensor xTensor);
                gpu.Forward(xTensor, out Tensor z_gpu, out Tensor a_gpu);
                z_gpu.Free();
                a_gpu.Free();
            }

            TestGradient(cpu, gpu, x, delta);
        }
Example #28
        public TrainingSessionResult Train(ITrainingDataset data, ITestDataset testData)
        {
            INeuralNetwork net = NetworkManager.NewSequential(TensorInfo.Linear(SubHistory.SubHistoryLength),
                                                              NetworkLayers.FullyConnected(SubHistory.SubHistoryLength, ActivationType.LeCunTanh),
                                                              NetworkLayers.Softmax(IMoveEngine.Payoffs));
            TrainingSessionResult result = NetworkManager.TrainNetwork(net,
                                                                       data,
                                                                       TrainingAlgorithms.AdaDelta(),
                                                                       100, 0.0f,
                                                                       null,
                                                                       testDataset: testData);

            if (result.StopReason == TrainingStopReason.EpochsCompleted)
            {
                _storage.Save(net);
                _network = net;
            }
            return result;
        }
        public void JsonMetadataSerialization()
        {
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image<Rgb24>(120, 120),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationType.AbsoluteReLU),
                                                                  NetworkLayers.Convolutional((5, 5), 20, ActivationType.ELU),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.ReLU),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.Identity),
                                                                  NetworkLayers.BatchNormalization(NormalizationMode.Spatial, ActivationType.ReLU),
                                                                  NetworkLayers.FullyConnected(125, ActivationType.Tanh),
                                                                  NetworkLayers.Softmax(133));
            string metadata1 = network.SerializeMetadataAsJson();

            Assert.IsTrue(metadata1.Length > 0);
            Assert.IsTrue(metadata1.Equals(network.Clone().SerializeMetadataAsJson()));
            network.Layers.First().To<INetworkLayer, ConvolutionalLayer>().Weights[0] += 0.1f;
            Assert.IsFalse(metadata1.Equals(network.SerializeMetadataAsJson()));
        }
Example #30
        public unsafe void Pool1()
        {
            // Down
            float[,]
            m =
            {
                {
                    -1, 0, 1, 2,
                    1, 1, 1, 1,
                    0, -0.3f, -5, -0.5f,
                    -1, 10, -2, -1
                }
            },
            r =
            {
                {
                    1, 2,
                    10, -0.5f
                }
            };
            fixed (float* pm = m)
            {
                Tensor.Reshape(pm, 1, 16, out Tensor mTensor);
                Tensor.New(1, 4, out Tensor result);
                CpuDnn.PoolingForward(mTensor, TensorInfo.Image<Alpha8>(4, 4), result);
                Assert.IsTrue(result.ToArray2D().ContentEquals(r));

                // Upscale
                CpuDnn.PoolingBackward(mTensor, TensorInfo.Image<Alpha8>(4, 4), result, mTensor);
                float[,] expected =
                {
                    {
                        0, 0, 0, 2,
                        1, 0, 0, 0,
                        0, 0, 0, -0.5f,
                        0, 10, 0, 0
                    }
                };
                Assert.IsTrue(mTensor.ToArray2D().ContentEquals(expected));
                result.Free();
            }
        }