        [Test]
        public void Train_Multilayer_SaveThenLoad()
        {
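            // Train a small network on a single example, serialise it to a
            // stream, then reload it and confirm the copy can still be evaluated.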
            var network = new MultilayerNetwork(4, new[] { 4, 2 });

            var bp = new BackPropagationLearning(network);

            var input = ColumnVector1D.Create(0, 0.2, 0.66, 0.28);
            var output = ColumnVector1D.Create(0, 0, 0, 0.999);

            var err = bp.Train(input, output); // one training pass over the example

            using (var ms = new MemoryStream())
            {
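                // save the trained network, rewind the stream and load a second instance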
                network.Save(ms);

                ms.Position = 0;

                var network2 = MultilayerNetwork.LoadData(ms);

                Assert.That(network2, Is.Not.Null);

                var output2 = network2.Evaluate(ColumnVector1D.Create(0.1, 0.3, 0.2, 0.1));

                Assert.That(output2, Is.Not.Null);
            }
        }
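For reference, a minimal sketch of driving the same training API over several passes; it reuses only the calls shown above and assumes that BackPropagationLearning.Train returns the error for the supplied example as a double (as the unused err variable suggests):

        // Minimal sketch, not part of the original tests: repeat the training step
        // and stop once the reported error no longer improves.
        public void Train_Multilayer_UntilErrorStopsFalling()
        {
            var network = new MultilayerNetwork(4, new[] { 4, 2 });
            var bp = new BackPropagationLearning(network);

            var input = ColumnVector1D.Create(0, 0.2, 0.66, 0.28);
            var output = ColumnVector1D.Create(0, 0, 0, 0.999);

            var lastError = double.MaxValue;

            for (var i = 0; i < 100; i++)
            {
                // assumption: Train returns the per-example error as a double
                double error = bp.Train(input, output);

                if (error >= lastError) break; // no further improvement

                lastError = error;
            }
        }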
Example #2
        [Test]
        public void Save_And_Load()
        {
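            // Build a 2-8-4 network with a known learning rate, give its weights
            // deterministic values, then round-trip it through serialisation and
            // verify that the parameters and weights survive.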
            var parameters = new NetworkParameters(new int[] { 2, 8, 4 })
            {
                LearningRate = 0.123
            };

            var network = new MultilayerNetwork(parameters);

            {
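                // scale each layer's weights by a distinct factor (10, 20, 30) so
                // they can be recognised after loading; ToList() materialises the
                // enumerations returned by ForEachLayer/ForEachNeuron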
                int li = 0;

                network.ForEachLayer(l =>
                {
                    li += 10;

                    l.ForEachNeuron((n, i) =>
                    {
                        n.Adjust((w, wi) => wi * li);
                        return 0;
                    }).ToList();
                    return 0;
                }).ToList();
            }

            byte[] serialisedData;

            using (var blobStore = new MemoryStream())
            {
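                // capture the serialised bytes so the network can be reloaded from a fresh stream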
                network.Save(blobStore);

                blobStore.Flush();

                serialisedData = blobStore.ToArray();
            }

            using (var blobStoreRead = new MemoryStream(serialisedData))
            {
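                // reload the network from the captured bytes and check that its
                // parameters and layer structure came back intact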
                var network2 = MultilayerNetwork.LoadData(blobStoreRead);

                Assert.That(network2.Parameters.LearningRate, Is.EqualTo(0.123));
                Assert.That(network2.Parameters.InputVectorSize, Is.EqualTo(2));
                Assert.That(network2.Parameters.OutputVectorSize, Is.EqualTo(4));
                Assert.That(network2.Layers.Count(), Is.EqualTo(3));

                // replay the same per-layer scaling and assert that every reloaded
                // weight still equals wi * li
                int li = 0;

                network2.ForEachLayer(l =>
                {
                    li += 10;

                    l.ForEachNeuron((n, i) =>
                    {
                        n.Adjust((w, wi) =>
                        {
                            Assert.That(w, Is.EqualTo(wi * li));
                            return w;
                        });
                        return 0;
                    }).ToList();
                    return 0;
                }).ToList();
            }
        }
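As a usage note, the Save/LoadData pair exercised in both tests could be wrapped in a small helper that clones a network via an in-memory byte array; a minimal sketch (the helper name is hypothetical, not a library API):

        // Minimal sketch: round-trip a network through a byte array using the same
        // Save/LoadData calls as above. CloneViaSerialisation is a hypothetical name.
        private static MultilayerNetwork CloneViaSerialisation(MultilayerNetwork network)
        {
            byte[] data;

            using (var ms = new MemoryStream())
            {
                network.Save(ms);
                data = ms.ToArray();
            }

            using (var ms = new MemoryStream(data))
            {
                return MultilayerNetwork.LoadData(ms);
            }
        }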