Example #1
0
        /// <summary>
        ///   Verifies <see cref="LogisticLoss"/> against precomputed reference values:
        ///   whole-vector losses for double and int label arrays, then the scalar
        ///   overloads for numeric (+1/-1/0) and boolean labels.
        /// </summary>
        public void Logistic()
        {
            const double tolerance = 1e-10;

            // Vector overloads: total loss over a label vector and a score vector.
            Assert.AreEqual(2.8938223431265548,
                new LogisticLoss(new[] { 0, 1, -0.5, 0.8 }).Loss(new[] { 0, 1, -0.5, 0.8 }), tolerance);
            Assert.AreEqual(3.2544961033487954,
                new LogisticLoss(new[] { 0, 1, -0.5, 0.8 }).Loss(new[] { 0, 1, 0.5, 0.8 }), tolerance);
            Assert.AreEqual(3.2544961033487954,
                new LogisticLoss(new[] { 0, 1, -0.5, 0.8 }).Loss(new[] { -5, 1, 0.5, 0.8 }), tolerance);
            Assert.AreEqual(42.765372851511813,
                new LogisticLoss(new[] { 5.4, 1, -0.5, 0.8 }).Loss(new[] { -5.2, 1, 0.5, 0.8 }), tolerance);

            // Integer label vectors against double score vectors.
            Assert.AreEqual(3.250459373221156,
                new LogisticLoss(new int[] { 0, 1, 0, 0 }).Loss(new double[] { 1, 1, 1, 1 }), tolerance);
            Assert.AreEqual(4,
                new LogisticLoss(new int[] { 0, 1, 0, 0 }).Loss(new double[] { 0, 0, 0, 0 }));
            Assert.AreEqual(6.1358494549990832,
                new LogisticLoss(new int[] { -1, 1, -1, -1 }).Loss(new double[] { -1, -1, -1, -1 }), tolerance);
            Assert.AreEqual(4.6931544141101194,
                new LogisticLoss(new int[] { 0, 0, 0, 1 }).Loss(new double[] { -1, 1, 1, 1 }), tolerance);
            Assert.AreEqual(7.5785444958880461,
                new LogisticLoss(new int[] { 0, 0, 0, 1 }).Loss(new double[] { -1, -1, -1, 1 }), tolerance);
            Assert.AreEqual(4.8946361239720115,
                new LogisticLoss(new int[] { -1, -1, -1, 1 }).Loss(new double[] { 0, 0, 0, 1 }), tolerance);
            Assert.AreEqual(4,
                new LogisticLoss(new double[] { 0, 1, 0, 0 }).Loss(new double[] { 0, 0, 0, 0 }));

            // Scalar overload, unit-magnitude labels and scores.
            Assert.AreEqual(0.31326168751822286d, new LogisticLoss().Loss(1, 1), tolerance);
            Assert.AreEqual(1.3132616875182228d, new LogisticLoss().Loss(-1, 1), tolerance);
            Assert.AreEqual(1.3132616875182228d, new LogisticLoss().Loss(1, -1), tolerance);
            Assert.AreEqual(0.31326168751822286d, new LogisticLoss().Loss(-1, -1), tolerance);

            // Scalar overload, larger score magnitudes.
            Assert.AreEqual(0.0067153484891179669d, new LogisticLoss().Loss(1, 5), tolerance);
            Assert.AreEqual(6.0024756851377301d, new LogisticLoss().Loss(-1, 6), tolerance);
            Assert.AreEqual(7.0009114664537737d, new LogisticLoss().Loss(1, -7), tolerance);
            Assert.AreEqual(0.00033540637289566238d, new LogisticLoss().Loss(-1, -8), tolerance);

            // Scalar overload, zero score gives ln(2) regardless of label sign.
            Assert.AreEqual(1.3132616875182228d, new LogisticLoss().Loss(-1, 1), tolerance);
            Assert.AreEqual(0.69314718055994529d, new LogisticLoss().Loss(1, 0), tolerance);
            Assert.AreEqual(0.69314718055994529d, new LogisticLoss().Loss(-1, 0), tolerance);

            // Boolean-label overload mirrors the +1/-1 scalar cases above.
            Assert.AreEqual(0.0067153484891179669d, new LogisticLoss().Loss(true, 5), tolerance);
            Assert.AreEqual(6.0024756851377301d, new LogisticLoss().Loss(false, 6), tolerance);
            Assert.AreEqual(7.0009114664537737d, new LogisticLoss().Loss(true, -7), tolerance);
            Assert.AreEqual(0.00033540637289566238d, new LogisticLoss().Loss(false, -8), tolerance);

            Assert.AreEqual(1.3132616875182228d, new LogisticLoss().Loss(false, 1), tolerance);
            Assert.AreEqual(0.69314718055994529d, new LogisticLoss().Loss(true, 0), tolerance);
            Assert.AreEqual(0.69314718055994529d, new LogisticLoss().Loss(false, 0), tolerance);
        }
Example #2
0
        /// <summary>
        ///   Trains a small two-layer MLP on the XOR truth table with the logistic
        ///   (sigmoid binary cross-entropy) loss, printing the accumulated loss and
        ///   training accuracy after every epoch. Uses all available GPUs, or the
        ///   CPU when none are present.
        /// </summary>
        public static void Run()
        {
            // XOR inputs (4 samples x 2 features) and their labels.
            var trainX = new NDArray(new float[] { 0, 0, 0, 1, 1, 0, 1, 1 }).Reshape(4, 2);
            var trainY = new NDArray(new float[] { 0, 1, 1, 0 });

            var batchSize = 2;
            var trainData = new NDArrayIter(trainX, trainY, batchSize);
            // NOTE(review): valData is never consumed below — kept for parity with the
            // original sample; wire it into an evaluation pass or remove it.
            var valData = new NDArrayIter(trainX, trainY, batchSize);

            var net = new Sequential();
            net.Add(new Dense(64, ActivationType.Relu));
            net.Add(new Dense(1));

            // Prefer every available GPU; fall back to a single CPU context.
            var gpus    = TestUtils.ListGpus();
            var ctxList = gpus.Count > 0
                ? gpus.Select(gpuId => Context.Gpu(gpuId)).ToArray()
                : new[] { Context.Cpu() };

            // ctxList is already an array — no defensive copy needed here.
            net.Initialize(new Uniform(), ctxList);

            var   trainer            = new Trainer(net.CollectParams(), new Adam());
            var   epochs             = 1000;
            var   metric             = new BinaryAccuracy();
            var   binaryCrossentropy = new LogisticLoss();
            float lossVal            = 0;

            for (var epoch = 0; epoch < epochs; epoch++)
            {
                trainData.Reset();
                lossVal = 0;
                while (!trainData.End())
                {
                    var batch = trainData.Next();
                    // Shard the batch across the available contexts.
                    var data  = Utils.SplitAndLoad(batch.Data[0], ctxList);
                    var label = Utils.SplitAndLoad(batch.Label[0], ctxList);

                    NDArrayList outputs = null;
                    using (var ag = Autograd.Record())
                    {
                        // Forward + backward per shard while gradients are recorded.
                        outputs = Enumerable.Zip(data, label, (x, y) =>
                        {
                            var z        = net.Call(x);
                            NDArray loss = binaryCrossentropy.Call(z, y);
                            loss.Backward();
                            lossVal += loss.Mean();
                            return (z);
                        }).ToList();
                    }

                    metric.Update(label, outputs.ToArray());
                    // Scale the optimizer step by the number of samples in the batch.
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var (name, acc) = metric.Get();
                metric.Reset();
                Console.WriteLine($"Loss: {lossVal}");
                Console.WriteLine($"Training acc at epoch {epoch}: {name}={acc * 100}%");
            }
        }