Example No. 1
        private void CrearNeuralNetwork()
        {
            int neuronasOcultas  = (int)spinNeuronasOculta.Value;
            var activacionOculta = cboFuncionActivacionOculta.SelectedItem as EnumInfo<ActivationType>;
            var pesosOculta      = cboPesosOculta.SelectedItem as EnumInfo<WeightsInitializationMode>;
            var biasOculta       = cboBiasOculta.SelectedItem as EnumInfo<BiasInitializationMode>;

            var activacionSalida = cboFuncionActivacionSalida.SelectedItem as EnumInfo<ActivationType>;
            var funcionCosto     = cboFuncionCosto.SelectedItem as EnumInfo<CostFunctionType>;
            var pesosSalida      = cboPesosSalida.SelectedItem as EnumInfo<WeightsInitializationMode>;
            var biasSalida       = cboBiasSalida.SelectedItem as EnumInfo<BiasInitializationMode>;

            LayerFactory layerSalida;

            if (activacionSalida.Valor == ActivationType.Softmax)
            {
                // Use the output-layer initialization settings selected in the UI
                layerSalida = NetworkLayers.Softmax(3, pesosSalida.Valor, biasSalida.Valor);
            }
            else
            {
                layerSalida = NetworkLayers.FullyConnected(3, activacionSalida.Valor, funcionCosto.Valor, pesosSalida.Valor, biasSalida.Valor);
            }

            _neuralNetwork = NetworkManager.NewSequential(TensorInfo.Linear(4),
                                                          NetworkLayers.FullyConnected(neuronasOcultas, activacionOculta.Valor, pesosOculta.Valor, biasOculta.Valor),
                                                          layerSalida);
        }
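EnumInfo<T> is not part of NeuralNetwork.NET; it is presumably a small local wrapper the form uses to bind enum values to its ComboBox controls. A minimal sketch of such a helper, assuming only the Valor property that the example reads (the display name and constructor are illustrative):

        // Hypothetical helper: wraps an enum value so it can be shown in a ComboBox.
        // The example above relies only on the Valor property.
        public sealed class EnumInfo<T> where T : struct, Enum
        {
            public EnumInfo(T valor, string descripcion)
            {
                Valor = valor;
                Descripcion = descripcion;
            }

            public T Valor { get; }

            public string Descripcion { get; }

            public override string ToString() => Descripcion;
        }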
Example No. 2
        public static async Task Main()
        {
            // Create the network
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image<Alpha8>(28, 28),
                                                                  NetworkLayers.Convolutional((5, 5), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  NetworkLayers.FullyConnected(100, ActivationType.LeCunTanh),
                                                                  NetworkLayers.Softmax(10));

            // Prepare the dataset
            ITrainingDataset trainingData = await Mnist.GetTrainingDatasetAsync(100); // Batches of 100 samples

            ITestDataset testData = await Mnist.GetTestDatasetAsync(p => Printf($"Epoch {p.Iteration}, cost: {p.Result.Cost}, accuracy: {p.Result.Accuracy}"));

            if (trainingData == null || testData == null)
            {
                Printf("Error downloading the datasets");
                Console.ReadKey();
                return;
            }

            // Train the network
            TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network,
                                                                                  trainingData,
                                                                                  TrainingAlgorithms.AdaDelta(),
                                                                                  60, 0.5f,
                                                                                  TrackBatchProgress,
                                                                                  testDataset: testData);

            Printf($"Stop reason: {result.StopReason}, elapsed time: {result.TrainingTime}");
            Console.ReadKey();
        }
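Printf and TrackBatchProgress are helpers used by this sample but not shown here. A minimal sketch of what they might look like, assuming the batch-progress callback receives a BatchProgress value exposing a Percentage property:

        // Hypothetical console helper used by the sample for status messages.
        private static void Printf(string text) => Console.WriteLine($">> {text}");

        // Hypothetical batch-progress callback passed to TrainNetworkAsync.
        private static void TrackBatchProgress(BatchProgress progress)
        {
            Console.SetCursorPosition(0, Console.CursorTop);
            Console.Write($"Batch progress: {progress.Percentage:N1}%   ");
        }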
Example No. 3
        public void Initialization1()
        {
            INeuralNetwork network = NetworkManager.NewGraph(TensorInfo.Image<Alpha8>(60, 60), root =>
            {
                var conv1 = root.Layer(NetworkLayers.Convolutional((5, 5), 10, ActivationType.Identity));
                var pool1 = conv1.Layer(NetworkLayers.Pooling(ActivationType.LeakyReLU));
                var conv2 = pool1.Layer(NetworkLayers.Convolutional((3, 3), 10, ActivationType.Identity));
                var pool2 = conv2.Layer(NetworkLayers.Pooling(ActivationType.ReLU));
                var fc    = pool2.Layer(NetworkLayers.FullyConnected(64, ActivationType.LeCunTanh));
                _         = fc.Layer(NetworkLayers.Softmax(10));
            });
        }
Example No. 4
        public void ForwardTest1()
        {
            INeuralNetwork cpu = NetworkManager.NewGraph(TensorInfo.Image<Alpha8>(28, 28), root =>
            {
                var fc1 = root.Layer(NetworkLayers.FullyConnected(100, ActivationType.Sigmoid));
                fc1.Layer(NetworkLayers.Softmax(10));
            });
            INeuralNetwork gpu = NetworkManager.NewGraph(TensorInfo.Image<Alpha8>(28, 28), root =>
            {
                var fc1l = cpu.Layers[0].To<INetworkLayer, FullyConnectedLayer>();
                var fc1  = root.Layer(_ => new CuDnnFullyConnectedLayer(fc1l.InputInfo, 100, fc1l.Weights, fc1l.Biases, fc1l.ActivationType));
                var sm1l = cpu.Layers[1].To<INetworkLayer, SoftmaxLayer>();
                fc1.Layer(_ => new CuDnnSoftmaxLayer(sm1l.InputInfo, sm1l.OutputInfo.Size, sm1l.Weights, sm1l.Biases));
            });

            ForwardTest(cpu, gpu);
        }
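ForwardTest is a helper from the surrounding test class rather than from the library; presumably it feeds the same input to both networks and checks that the outputs agree. A minimal sketch, assuming INeuralNetwork.Forward is used for inference and MSTest-style assertions:

        // Hypothetical helper: verifies that the CPU and cuDNN networks produce matching outputs.
        private static void ForwardTest(INeuralNetwork cpu, INeuralNetwork gpu)
        {
            var random = new Random(42);
            float[] input = new float[cpu.InputInfo.Size];
            for (int i = 0; i < input.Length; i++) input[i] = (float)random.NextDouble();

            float[] yCpu = cpu.Forward(input);
            float[] yGpu = gpu.Forward(input);

            Assert.AreEqual(yCpu.Length, yGpu.Length);
            for (int i = 0; i < yCpu.Length; i++)
                Assert.IsTrue(Math.Abs(yCpu[i] - yGpu[i]) < 1e-4f);
        }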
Example No. 5
        public void JsonMetadataSerialization()
        {
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image <Rgb24>(120, 120),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationType.AbsoluteReLU),
                                                                  NetworkLayers.Convolutional((5, 5), 20, ActivationType.ELU),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.ReLU),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.Identity),
                                                                  NetworkLayers.BatchNormalization(NormalizationMode.Spatial, ActivationType.ReLU),
                                                                  NetworkLayers.FullyConnected(125, ActivationType.Tanh),
                                                                  NetworkLayers.Softmax(133));
            string metadata1 = network.SerializeMetadataAsJson();

            Assert.IsTrue(metadata1.Length > 0);
            Assert.IsTrue(metadata1.Equals(network.Clone().SerializeMetadataAsJson()));
            network.Layers.First().To<INetworkLayer, ConvolutionalLayer>().Weights[0] += 0.1f;
            Assert.IsFalse(metadata1.Equals(network.SerializeMetadataAsJson()));
        }
Example No. 6
        public TrainingSessionResult Train(ITrainingDataset data, ITestDataset testData)
        {
            INeuralNetwork net = NetworkManager.NewSequential(TensorInfo.Linear(SubHistory.SubHistoryLength),
                                                              NetworkLayers.FullyConnected(SubHistory.SubHistoryLength, ActivationType.LeCunTanh),
                                                              NetworkLayers.Softmax(IMoveEngine.Payoffs));
            TrainingSessionResult result = NetworkManager.TrainNetwork(net,
                                                                       data,
                                                                       TrainingAlgorithms.AdaDelta(),
                                                                       100, 0.0f,
                                                                       null,
                                                                       testDataset: testData);

            if (result.StopReason == TrainingStopReason.EpochsCompleted)
            {
                _storage.Save(net);
                _network = net;
            }
            return result;
        }
Example No. 7
        public void NetworkSerialization()
        {
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image <Rgb24>(120, 120),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationFunctionType.AbsoluteReLU),
                                                                  NetworkLayers.Convolutional((5, 5), 20, ActivationFunctionType.ELU),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationFunctionType.Identity),
                                                                  NetworkLayers.Pooling(ActivationFunctionType.ReLU),
                                                                  NetworkLayers.Convolutional((10, 10), 20, ActivationFunctionType.Identity),
                                                                  NetworkLayers.Pooling(ActivationFunctionType.ReLU),
                                                                  NetworkLayers.FullyConnected(125, ActivationFunctionType.Tanh),
                                                                  NetworkLayers.Softmax(133));

            using (MemoryStream stream = new MemoryStream())
            {
                network.Save(stream);
                stream.Seek(0, SeekOrigin.Begin);
                INeuralNetwork copy = NetworkLoader.TryLoad(stream, LayersLoadingPreference.Cpu);
                Assert.IsTrue(network.Equals(copy));
            }
        }
Example No. 8
 public INeuralNetwork BuildNeuralNetwork() =>
     NetworkManager.NewSequential(TensorInfo.Linear(ParametersLength * MemoryStates),
                                  NetworkLayers.FullyConnected(50, ActivationType.ReLU),
                                  NetworkLayers.FullyConnected(20, ActivationType.ReLU),
                                  NetworkLayers.Softmax(EnvInstance.ActionSpace.Shape.Size));
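A possible way to consume the network built above for action selection is a forward pass followed by an arg-max over the Softmax output; the SelectAction name and the encodedState layout are assumptions, not part of the original code:

 // Hypothetical usage: run a forward pass and pick the action with the highest probability.
 public int SelectAction(INeuralNetwork network, float[] encodedState)
 {
     float[] probabilities = network.Forward(encodedState);
     int best = 0;
     for (int i = 1; i < probabilities.Length; i++)
     {
         if (probabilities[i] > probabilities[best]) best = i;
     }
     return best;
 }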