Example #1
        private static void SampleParkinsons()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("parkinsons");

            IDataset dataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateGhostTrainer("parkinsons-trainer");

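            // 22 input features (the Parkinson's voice measures) funnelled through two
            // hidden layers down to a single output, scored with a squared-difference cost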
            trainer.Network.Architecture = InputLayer.Construct(22)
                                           + FullyConnectedLayer.Construct(140)
                                           + FullyConnectedLayer.Construct(20)
                                           + FullyConnectedLayer.Construct(1)
                                           + OutputLayer.Construct(1)
                                           + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(10, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            sigma.AddTrainer(trainer);

            sigma.PrepareAndRun();
        }
Example #2
        private static void SampleWdbc()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("wdbc");

            IDataset dataset = Defaults.Datasets.Wdbc();

            ITrainer trainer = sigma.CreateGhostTrainer("wdbc-trainer");

            trainer.Network.Architecture = InputLayer.Construct(30)
                                           + FullyConnectedLayer.Construct(42)
                                           + FullyConnectedLayer.Construct(24)
                                           + FullyConnectedLayer.Construct(1)
                                           + OutputLayer.Construct(1)
                                           + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(72, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.005);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            sigma.AddTrainer(trainer);

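            // expose live training state over HTTP so it can be watched in a browser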
            sigma.AddMonitor(new HttpMonitor("http://+:8080/sigma/"));

            sigma.PrepareAndRun();
        }
Example #3
 // single layer network
 public static NeuralNetwork<Vector> Create(IDataSet<Vector, Vector> dataSet, IActivator activator)
 {
     var workLayer = new FullyConnectedLayer(dataSet.FirstInput.Size, dataSet.FirstOutput.Size, activator);
     var outputLayer = new OutputLayer<Vector>();
     var layers = new CompositeLayer<Vector, Vector, Vector>(workLayer, outputLayer);
     return new NeuralNetwork<Vector>(layers);
 }
Example #4
        public static NeuralNetwork<Matrix> Create(IDataSet<Matrix, Vector> dataSet)
        {
            var count  = 5;

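            // five parallel branches, each 28x28 -> conv -> 24x24 -> subsample -> 12x12
            // -> conv -> 8x8 -> subsample -> 4x4; the merged 4x4 maps (16 values per
            // branch) feed a 10-way fully connected classifier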
            var a = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                a[i] = new MatrixConvolutor(28, 28, 24, 24, new Tanh());

            var b = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                b[i] = new MatrixSubsampler(24, 24, 12, 12, new Tanh());

            var c = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                c[i] = new MatrixConvolutor(12, 12, 8, 8, new Tanh());

            var d = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                d[i] = new MatrixSubsampler(8, 8, 4, 4, new Tanh());

            var splitter    = new Splitter<Matrix, Matrix>(a);
            var applicator1 = new Applicator<Matrix, Matrix>(b);
            var applicator2 = new Applicator<Matrix, Matrix>(c);
            var merger      = new MatrixMerger<Matrix>(d);

            var classif  = new FullyConnectedLayer(16 * count, 10, new Tanh());

            var comp = CompositeLayer<Vector, Vector[], Vector>.Compose(splitter,
                                                                        applicator1,
                                                                        applicator2,
                                                                        merger,
                                                                        classif);

            return new NeuralNetwork<Matrix>(comp);
        }
Example #5
        private static NeuralNetwork InitializeNeuralNetwork(int seed)
        {
            Random random = new Random(seed == 0 ? new Random().Next() : seed);

            float RandomWeight() => (float)(random.NextDouble() * 2 - 1);

            Layer prevLayer;

            InputLayer li = new InputLayer(3, 5);

            prevLayer = li;

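            // ConvolutionalLayer arguments appear to be (filters, kernelSize, stride,
            // padding, input, activation) -- inferred from usage, not from documentation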
            ConvolutionalLayer l0 = new ConvolutionalLayer(8, 2, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l0;
            prevLayer.InitializeWeights(RandomWeight);

            ConvolutionalLayer l2 = new ConvolutionalLayer(16, 2, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l2;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l7 = new FullyConnectedLayer(16, prevLayer, ActivationFunctions.Sigmoid(1));

            prevLayer = l7;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l8 = new FullyConnectedLayer(10, prevLayer, ActivationFunctions.SoftMax(1));

            prevLayer = l8;
            prevLayer.InitializeWeights(RandomWeight);

            return new NeuralNetwork(li, l0, l2, l7, l8);
        }
Example #6
        private static ITrainer CreateIrisTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Iris();

            ITrainer trainer = sigma.CreateTrainer("iris-trainer");

            trainer.Network = new Network();
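            // 4 iris features in, 3 one-hot class outputs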
            trainer.Network.Architecture = InputLayer.Construct(4)
                                           + FullyConnectedLayer.Construct(12)
                                           + FullyConnectedLayer.Construct(3)
                                           + OutputLayer.Construct(3)
                                           + SquaredDifferenceCostLayer.Construct();
            //trainer.Network = Serialisation.ReadBinaryFileIfExists("iris.sgnet", trainer.Network);

            trainer.TrainingDataIterator = new MinibatchIterator(50, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.06);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            //trainer.AddGlobalHook(new StopTrainingHook(atEpoch: 100));
            //trainer.AddLocalHook(new EarlyStopperHook("optimiser.cost_total", 20, target: ExtremaTarget.Min));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));
            //.On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));
            //trainer.AddLocalHook(new DiskSaviorHook<INetwork>("network.self", Namers.Dynamic("iris_epoch{0}.sgnet", "epoch"), verbose: true)
            //    .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));

            return trainer;
        }
Example #7
        public unsafe void FullyConnectedForward()
        {
            FullyConnectedLayer fc = new FullyConnectedLayer(TensorInfo.Linear(231), 125, ActivationType.Sigmoid, WeightsInitializationMode.GlorotUniform, BiasInitializationMode.Gaussian);
            Tensor x = CreateRandomTensor(400, fc.InputInfo.Size);

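            // run the same forward pass on the CPU (CpuDnn) and on the GPU,
            // then assert both outputs match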
            fixed (float* pw = fc.Weights, pb = fc.Biases)
            {
                Tensor.Reshape(pw, fc.InputInfo.Size, fc.OutputInfo.Size, out Tensor w);
                Tensor.Reshape(pb, 1, fc.OutputInfo.Size, out Tensor b);
                Tensor.New(x.Entities, fc.OutputInfo.Size, out Tensor y1);
                CpuDnn.FullyConnectedForward(x, w, b, y1);
                Gpu gpu = Gpu.Default;

                using (DeviceMemory<float>
                       x_gpu = gpu.AllocateDevice(x),
                       w_gpu = gpu.AllocateDevice(w),
                       b_gpu = gpu.AllocateDevice(b),
                       y_gpu = gpu.AllocateDevice<float>(y1.Size))
                {
                    Dnn.Get(gpu).FullyConnectedForward(x.Entities, x.Length, y1.Length, x_gpu.Ptr, w_gpu.Ptr, b_gpu.Ptr, y_gpu.Ptr);
                    y_gpu.CopyToHost(y1.Entities, y1.Length, out Tensor y2);
                    Assert.IsTrue(y1.ContentEquals(y2));
                    Tensor.Free(x, y1, y2);
                }
            }
        }
Example #8
        public void ConstructorTest1()
        {
            Shape     shape           = new Shape(new int[] { 1, 10, 12, 3 });
            const int NumberOfNeurons = 100;

            foreach (MatrixLayout matrixLayout in Enum.GetValues(typeof(MatrixLayout)).OfType<MatrixLayout>())
            {
                FullyConnectedLayer layer = new FullyConnectedLayer(shape, NumberOfNeurons, matrixLayout, null);
                Assert.AreEqual("100N", layer.Architecture);
                CollectionAssert.AreEqual(new[] { 1, NumberOfNeurons }, layer.OutputShape.Axes);
                Assert.AreEqual(NumberOfNeurons, layer.NumberOfNeurons);
                Assert.AreEqual(matrixLayout, layer.MatrixLayout);

                CollectionAssert.AreEqual(
                    matrixLayout == MatrixLayout.RowMajor ?
                    new[] { NumberOfNeurons, 10 * 12 * 3 } :
                    new[] { 10 * 12 * 3, NumberOfNeurons },
                    layer.W.Axes);
                Assert.IsFalse(layer.W.Weights.Take(layer.W.Length).All(x => x == 0.0f));
                Assert.AreEqual(0.0, layer.W.Weights.Take(layer.W.Length).Average(), 0.01f);

                CollectionAssert.AreEqual(new[] { NumberOfNeurons }, layer.B.Axes);
                Assert.IsTrue(layer.B.Weights.Take(layer.B.Length).All(x => x == 0.0f));
            }
        }
Example #9
        /// <summary>
        /// Create an MNIST trainer (handwriting recognition) that will be added to an environment.
        /// </summary>
        /// <param name="sigma">The sigma environment this trainer will be assigned to.</param>
        /// <returns>The newly created trainer.</returns>
        private static ITrainer CreateMnistTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            trainer.Network = new Network();
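            // 28x28 pixel inputs, a dropout-regularised dense stack, 10-way softmax cross-entropy output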
            trainer.Network.Architecture = InputLayer.Construct(28, 28)
                                           + DropoutLayer.Construct(0.2)
                                           + FullyConnectedLayer.Construct(1000, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(800, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                                           + OutputLayer.Construct(10)
                                           + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(100, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));

            return trainer;
        }
Example #10
        /// <summary>
        /// Create an MNIST trainer (handwriting recognition) that will be added to an environment.
        /// </summary>
        /// <param name="sigma">The sigma environment this trainer will be assigned to.</param>
        /// <returns>The newly created trainer.</returns>
        private static ITrainer CreateMnistTrainer(SigmaEnvironment sigma)
        {
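            // read the raw MNIST IDX files (downloading them if missing), scale the
            // 0-255 pixel bytes with a normalising preprocessor and one-hot the labels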
            ByteRecordReader mnistImageReader    = new ByteRecordReader(headerLengthBytes: 16, recordSizeBytes: 28 * 28, source: new CompressedSource(new MultiSource(new FileSource("train-images-idx3-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"))));
            IRecordExtractor mnistImageExtractor = mnistImageReader.Extractor("inputs", new[] { 0L, 0L }, new[] { 28L, 28L }).Preprocess(new NormalisingPreprocessor(0, 255));

            ByteRecordReader mnistTargetReader    = new ByteRecordReader(headerLengthBytes: 8, recordSizeBytes: 1, source: new CompressedSource(new MultiSource(new FileSource("train-labels-idx1-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz"))));
            IRecordExtractor mnistTargetExtractor = mnistTargetReader.Extractor("targets", new[] { 0L }, new[] { 1L }).Preprocess(new OneHotPreprocessor(minValue: 0, maxValue: 9));

            IDataset dataset = new Dataset("mnist-training", Dataset.BlockSizeAuto, mnistImageExtractor, mnistTargetExtractor);
            ITrainer trainer = sigma.CreateTrainer("test");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(28, 28)
                               + 2 * FullyConnectedLayer.Construct(28 * 28)
                               + FullyConnectedLayer.Construct(10)
                               + OutputLayer.Construct(10)
                               + SoftMaxCrossEntropyCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(8, dataset);
            trainer.Optimiser            = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator             = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.05f));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01f, mean: 0.03f));

            trainer.AddGlobalHook(new CurrentEpochIterationReporter(TimeStep.Every(1, TimeScale.Iteration)));

            return trainer;
        }
Example #11
        public static ITrainer CreateTicTacToeTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.TicTacToe();

            ITrainer trainer = sigma.CreateTrainer("tictactoe-trainer");

            trainer.Network = new Network();
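            // 9 board cells in, 3 outcome classes out; the best network by top-1
            // validation accuracy is persisted via the DiskSaviorHook below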
            trainer.Network.Architecture = InputLayer.Construct(9)
                                           + FullyConnectedLayer.Construct(72, "tanh")
                                           + FullyConnectedLayer.Construct(99, "tanh")
                                           + FullyConnectedLayer.Construct(3, "tanh")
                                           + OutputLayer.Construct(3)
                                           + SoftMaxCrossEntropyCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(21, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new MomentumGradientOptimiser(learningRate: 0.01, momentum: 0.9);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: new[] { 1, 2 }));

            trainer.AddGlobalHook(new DiskSaviorHook<INetwork>(TimeStep.Every(1, TimeScale.Epoch), "network.self", Namers.Static("tictactoe.sgnet"), verbose: true)
                                  .On(new ExtremaCriteria("shared.classification_accuracy_top1", ExtremaTarget.Max)));

            return trainer;
        }
Example #12
        private static ITrainer CreateParkinsonsTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateTrainer("parkinsons-trainer");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(22)
                               + FullyConnectedLayer.Construct(140)
                               + FullyConnectedLayer.Construct(20)
                               + FullyConnectedLayer.Construct(1)
                               + OutputLayer.Construct(1)
                               + SquaredDifferenceCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(10, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);
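            // the DebugHandler wraps the float32 handler, presumably adding runtime checks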
            trainer.Operator  = new CpuSinglethreadedOperator(new DebugHandler(new CpuFloat32Handler()));

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            return trainer;
        }
Example #13
        public unsafe void FullyConnectedBackwardData()
        {
            FullyConnectedLayer fc = new FullyConnectedLayer(TensorInfo.Linear(231), 125, ActivationType.Sigmoid, WeightsInitializationMode.GlorotUniform, BiasInitializationMode.Gaussian);
            Tensor dy = CreateRandomTensor(400, fc.OutputInfo.Size);

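            // backpropagate dy through the layer on the CPU and on the GPU,
            // then assert both input gradients match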
            fixed (float* pw = fc.Weights, pb = fc.Biases)
            {
                Tensor.Reshape(pw, fc.InputInfo.Size, fc.OutputInfo.Size, out Tensor w);
                Tensor.Reshape(pb, 1, fc.OutputInfo.Size, out Tensor b);
                Tensor.New(dy.Entities, fc.InputInfo.Size, out Tensor dx1);
                CpuDnn.FullyConnectedBackwardData(w, dy, dx1);
                Gpu gpu = Gpu.Default;

                using (DeviceMemory<float>
                       dy_gpu = gpu.AllocateDevice(dy),
                       w_gpu = gpu.AllocateDevice(w),
                       dx_gpu = gpu.AllocateDevice<float>(dx1.Size))
                {
                    Dnn.Get(gpu).FullyConnectedBackwardData(dy.Entities, fc.InputInfo.Size, fc.OutputInfo.Size, dy_gpu.Ptr, w_gpu.Ptr, dx_gpu.Ptr);
                    dx_gpu.CopyToHost(dx1.Entities, dx1.Length, out Tensor dx2);
                    Assert.IsTrue(dx1.ContentEquals(dx2));
                    Tensor.Free(dy, dx1, dx2);
                }
            }
        }
Example #14
        private static void SampleXor()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("logical");

            sigma.SetRandomSeed(0);
            sigma.Prepare();

            RawDataset dataset = new RawDataset("xor");

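            // tiny in-memory dataset: the four XOR input/target pairs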
            dataset.AddRecords("inputs", new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 });
            dataset.AddRecords("targets", new[] { 0 }, new[] { 0 }, new[] { 0 }, new[] { 1 });

            ITrainer trainer = sigma.CreateTrainer("xor-trainer");

            trainer.Network.Architecture = InputLayer.Construct(2) + FullyConnectedLayer.Construct(2) + FullyConnectedLayer.Construct(1) + OutputLayer.Construct(1) + SquaredDifferenceCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(1, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.1);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new StopTrainingHook(atEpoch: 10000));
            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), averageValues: true));
            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Stop), averageValues: true));
            trainer.AddLocalHook(new ValueReporter("network.layers.*<external_output>._outputs.default.activations", TimeStep.Every(1, TimeScale.Stop)));
            trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.weights", TimeStep.Every(1, TimeScale.Stop)));
            trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.biases", TimeStep.Every(1, TimeScale.Stop)));

            sigma.Run();
        }
Example #15
        public void CloneTest()
        {
            Shape shape = new Shape(new int[] { -1, 20, 20, 10 });
            FullyConnectedLayer layer1 = new FullyConnectedLayer(shape, 100, MatrixLayout.ColumnMajor, null);
            FullyConnectedLayer layer2 = layer1.Clone() as FullyConnectedLayer;

            Assert.AreEqual(JsonConvert.SerializeObject(layer1), JsonConvert.SerializeObject(layer2));
        }
Example #16
        public void CopyConstructorTest1()
        {
            Shape shape = new Shape(new int[] { -1, 20, 20, 10 });
            FullyConnectedLayer layer1 = new FullyConnectedLayer(shape, 100, MatrixLayout.ColumnMajor, null);
            FullyConnectedLayer layer2 = new FullyConnectedLayer(layer1);

            Assert.AreEqual(JsonConvert.SerializeObject(layer1), JsonConvert.SerializeObject(layer2));
        }
Example #17
        public void FullyConnectedBackward()
        {
            FullyConnectedLayer
                cpu = new FullyConnectedLayer(TensorInfo.Linear(250), 127, ActivationType.LeCunTanh, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnFullyConnectedLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases, cpu.ActivationType);

            TestBackward(cpu, gpu, 400);
        }
Example #18
        public void FullyConnectedForward()
        {
            float[,] x = WeightsProvider.NewFullyConnectedWeights(TensorInfo.Linear(400), 250, WeightsInitializationMode.GlorotNormal).AsSpan().AsMatrix(400, 250);
            FullyConnectedLayer
                cpu = new FullyConnectedLayer(TensorInfo.Linear(250), 127, ActivationFunctionType.LeCunTanh, WeightsInitializationMode.GlorotNormal, BiasInitializationMode.Gaussian),
                gpu = new CuDnnFullyConnectedLayer(cpu.InputInfo, cpu.OutputInfo.Size, cpu.Weights, cpu.Biases, cpu.ActivationFunctionType);

            TestForward(cpu, gpu, x);
        }
Example #19
        private static NeuralNetwork InitializeNeuralNetwork(int seed)
        {
            Random random = new Random(seed == 0 ? new Random().Next() : seed);

            float RandomWeight() => (float)(random.NextDouble() * 2 - 1);

            Layer prevLayer;

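            // LeNet-style stack for 28x28 inputs: three conv+maxpool stages,
            // then 64-, 32- and 10-unit dense layers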
            InputLayer li = new InputLayer(28, 28);

            prevLayer = li;

            ConvolutionalLayer l0 = new ConvolutionalLayer(15, 5, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l0;
            prevLayer.InitializeWeights(RandomWeight);

            MaxPoolingLayer l1 = new MaxPoolingLayer(2, 2, prevLayer);

            prevLayer = l1;

            ConvolutionalLayer l2 = new ConvolutionalLayer(30, 4, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l2;
            prevLayer.InitializeWeights(RandomWeight);

            MaxPoolingLayer l3 = new MaxPoolingLayer(3, 2, prevLayer);

            prevLayer = l3;

            ConvolutionalLayer l4 = new ConvolutionalLayer(45, 2, 2, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l4;
            prevLayer.InitializeWeights(RandomWeight);

            MaxPoolingLayer l5 = new MaxPoolingLayer(2, 1, prevLayer);

            prevLayer = l5;

            FullyConnectedLayer l6 = new FullyConnectedLayer(64, prevLayer, ActivationFunctions.Sigmoid(1));

            prevLayer = l6;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l7 = new FullyConnectedLayer(32, prevLayer, ActivationFunctions.Sigmoid(1));

            prevLayer = l7;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l8 = new FullyConnectedLayer(10, prevLayer, ActivationFunctions.SoftMax(1));

            prevLayer = l8;
            prevLayer.InitializeWeights(RandomWeight);

            return new NeuralNetwork(li, l0, l1, l2, l3, l4, l5, l6, l7, l8);
        }
Example #20
        public void SerializeTest()
        {
            Shape shape = new Shape(new int[] { -1, 20, 20, 10 });
            FullyConnectedLayer layer1 = new FullyConnectedLayer(shape, 100, MatrixLayout.ColumnMajor, null);
            string s1 = JsonConvert.SerializeObject(layer1);
            FullyConnectedLayer layer2 = JsonConvert.DeserializeObject<FullyConnectedLayer>(s1);
            string s2 = JsonConvert.SerializeObject(layer2);

            Assert.AreEqual(s1, s2);
        }
Example #21
        public ActivationNetwork(IActivationFunction function, int inputsCount, params int[] neuronsCount)
        {
            var layersCount = Math.Max(1, neuronsCount.Length);

            Layers = new FullyConnectedLayer[layersCount];

            for (var i = 0; i < neuronsCount.Length; i++)
            {
                Layers[i] = new FullyConnectedLayer(neuronsCount[i], i == 0 ? inputsCount : neuronsCount[i - 1], function);
            }
        }
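
A hedged usage sketch (SigmoidFunction is an assumed IActivationFunction implementation, not taken from the code above): each entry of the params array becomes one FullyConnectedLayer, so a 2-input network with a 3-neuron hidden layer and a single output neuron is:

        var network = new ActivationNetwork(new SigmoidFunction(), 2, 3, 1);
        // network.Layers[0]: 3 neurons with 2 inputs each
        // network.Layers[1]: 1 neuron with 3 inputs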
Example #22
        public void ShouldProduceActivation()
        {
            _activationFunction.Activate(Arg.Is<double>(x => x == 21)).Returns(1);
            _activationFunction.Activate(Arg.Is<double>(x => x == 26)).Returns(5);
            _activationFunction.Activate(Arg.Is<double>(x => x == 31)).Returns(3);
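            // 21, 26 and 31 are the expected pre-activation sums, consistent with a
            // per-input (column-major) weight layout: neuron 0 uses weights {1, 3, 1},
            // so 1*3 + 3*4 + 1*5 + bias 1 = 21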

            var fullyConnectedLayer = new FullyConnectedLayer(_activationFunction, new double[] { 1, 2, 3, 3, 2, 1, 1, 2, 3 }, new double[] { 1, 2, 3 });

            fullyConnectedLayer.Produce(new double[] { 3, 4, 5 });
            fullyConnectedLayer.Activations.Should().NotBeNull();
            fullyConnectedLayer.Activations.Should().ContainInOrder(new double[] { 1, 5, 3 });
            _activationFunction.Received(3).Activate(Arg.Any<double>());
        }
Example #23
        private static void SampleNetworkArchitecture()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("test");

            IComputationHandler handler = new CpuFloat32Handler();
            ITrainer            trainer = sigma.CreateTrainer("test_trainer");

            trainer.Network = new Network();
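            // layer arithmetic: "+" chains layers, "2 * (...)" repeats the grouped layers twice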
            trainer.Network.Architecture = InputLayer.Construct(2, 2) +
                                           ElementwiseLayer.Construct(2 * 2) +
                                           FullyConnectedLayer.Construct(2) +
                                           2 * (FullyConnectedLayer.Construct(4) + FullyConnectedLayer.Construct(2)) +
                                           OutputLayer.Construct(2);
            trainer.Network = (INetwork)trainer.Network.DeepCopy();

            trainer.Operator = new CpuMultithreadedOperator(10);

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1f));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01f, mean: 0.03f));
            trainer.Initialise(handler);

            trainer.Network = (INetwork)trainer.Network.DeepCopy();

            Console.WriteLine(trainer.Network.Registry);

            IRegistryResolver resolver = new RegistryResolver(trainer.Network.Registry);

            Console.WriteLine("===============");
            object[] weights = resolver.ResolveGet<object>("layers.*.weights");
            Console.WriteLine(string.Join("\n", weights));
            Console.WriteLine("===============");



            //foreach (ILayerBuffer buffer in trainer.Network.YieldLayerBuffersOrdered())
            //{
            //      Console.WriteLine(buffer.Layer.Name + ": ");

            //      Console.WriteLine("inputs:");
            //      foreach (string input in buffer.Inputs.Keys)
            //      {
            //              Console.WriteLine($"\t{input}: {buffer.Inputs[input].GetHashCode()}");
            //      }

            //      Console.WriteLine("outputs:");
            //      foreach (string output in buffer.Outputs.Keys)
            //      {
            //              Console.WriteLine($"\t{output}: {buffer.Outputs[output].GetHashCode()}");
            //      }
            //}
        }
Example #24
        public void ShouldInitializeWhenValuesProvided()
        {
            var fullyConnectedLayer = new FullyConnectedLayer(_activationFunction, new double[] { 1, 2, 3, 3, 2, 1, 1, 2, 3 }, new double[] { 1, 2, 3 });

            fullyConnectedLayer.ActivationFunction.Should().NotBeNull();
            fullyConnectedLayer.SynapsesWeights.Should().NotBeNull();
            fullyConnectedLayer.SynapsesWeights.Should().ContainInOrder(new double[] { 1, 2, 3, 3, 2, 1, 1, 2, 3 });
            fullyConnectedLayer.NeuronsBiases.Should().NotBeNull();
            fullyConnectedLayer.NeuronsBiases.Should().ContainInOrder(new double[] { 1, 2, 3 });
            fullyConnectedLayer.Outputs.Should().NotBeNull();
            fullyConnectedLayer.Outputs.Should().ContainInOrder(new double[] { 0, 0, 0 });
            fullyConnectedLayer.Activations.Should().NotBeNull();
            fullyConnectedLayer.Activations.Should().ContainInOrder(new double[] { 0, 0, 0 });
        }
Example #25
        private static void SampleIris()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("iris");

            sigma.SetRandomSeed(0);

            sigma.Prepare();

            IDataset dataset = Defaults.Datasets.Iris();

            ITrainer trainer = sigma.CreateGhostTrainer("iris-trainer");

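            // 4 iris features in, 3 class outputs; training stops once top-1
            // accuracy reaches 98% (see the StopTrainingHook below)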
            trainer.Network.Architecture = InputLayer.Construct(4)
                                           + FullyConnectedLayer.Construct(12)
                                           + FullyConnectedLayer.Construct(3)
                                           + OutputLayer.Construct(3)
                                           + SquaredDifferenceCostLayer.Construct();
            //trainer.Network = Serialisation.ReadBinaryFileIfExists("iris.sgnet", trainer.Network);

            trainer.TrainingDataIterator = new MinibatchIterator(50, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.06);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            //trainer.AddGlobalHook(new StopTrainingHook(atEpoch: 100));
            //trainer.AddLocalHook(new EarlyStopperHook("optimiser.cost_total", 20, target: ExtremaTarget.Min));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));
            //.On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));
            //trainer.AddLocalHook(new DiskSaviorHook<INetwork>("network.self", Namers.Dynamic("iris_epoch{0}.sgnet", "epoch"), verbose: true)
            //    .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));
            trainer.AddHook(new StopTrainingHook(new ThresholdCriteria("shared.classification_accuracy_top1", ComparisonTarget.GreaterThanEquals, 0.98)));

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(599, TimeScale.Iteration), 128));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));

            //Serialisation.WriteBinaryFile(trainer, "trainer.sgtrainer");
            //trainer = Serialisation.ReadBinaryFile<ITrainer>("trainer.sgtrainer");

            sigma.AddTrainer(trainer);

            sigma.AddMonitor(new HttpMonitor("http://+:8080/sigma/"));

            sigma.PrepareAndRun();
        }
Example #26
        private static void SampleMnist()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("mnist");

            sigma.SetRandomSeed(0);

            IDataset dataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(28, 28)
                                           + DropoutLayer.Construct(0.2)
                                           + FullyConnectedLayer.Construct(1000, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(800, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                                           + OutputLayer.Construct(10)
                                           + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(100, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            //trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.01);
            //trainer.Optimiser = new MomentumGradientOptimiser(learningRate: 0.01, momentum: 0.9);
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            var validationTimeStep = TimeStep.Every(1, TimeScale.Epoch);

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", validationTimeStep, tops: new[] { 1, 2, 3 }));

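            // one reporter per digit class, each driven by the one-hot target vector for that digit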
            for (int i = 0; i < 10; i++)
            {
                trainer.AddGlobalHook(new TargetMaximisationReporter(trainer.Operator.Handler.NDArray(ArrayUtils.OneHot(i, 10), 10), TimeStep.Every(1, TimeScale.Epoch)));
            }

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(10, TimeScale.Iteration), 32));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));
            trainer.AddHook(new StopTrainingHook(atEpoch: 10));

            sigma.PrepareAndRun();
        }
Example #27
        public void ArchitectureConstructorTest2()
        {
            string architecture = "100NN";

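            // "100NN" is malformed (a valid architecture string looks like "100N"),
            // so the constructor is expected to throw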
            try
            {
                FullyConnectedLayer layer = new FullyConnectedLayer(new Shape(new int[] { -1, 20, 20, 10 }), architecture, null);
            }
            catch (ArgumentException e)
            {
                Assert.AreEqual(
                    new ArgumentException(string.Format(CultureInfo.InvariantCulture, Properties.Resources.E_InvalidLayerArchitecture, architecture), nameof(architecture)).Message,
                    e.Message);
                throw;
            }
        }
Example #28
        internal static INetworkLayer CpuLayerDeserialize([NotNull] Stream stream, LayerType type)
        {
            switch (type)
            {
                case LayerType.FullyConnected: return FullyConnectedLayer.Deserialize(stream);
                case LayerType.Convolutional: return ConvolutionalLayer.Deserialize(stream);
                case LayerType.Pooling: return PoolingLayer.Deserialize(stream);
                case LayerType.Output: return OutputLayer.Deserialize(stream);
                case LayerType.Softmax: return SoftmaxLayer.Deserialize(stream);
                default: throw new ArgumentOutOfRangeException(nameof(type), $"The {type} layer type is not supported by the default deserializer");
            }
        }
Example #29
            public void SetUp()
            {
                var mock = new Mock<IWeightInitializer>();

                var queue = new Queue<double>(new double[] {
                    0.15, 0.75, -0.33, 0.4, 0.1,
                    -0.55, -1.09, 2.03, 0.02, 0.42,
                    1.23, -2.93, 0.56, -0.98, -0.55
                });

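                // each GenerateRandom call dequeues the next scripted value;
                // 15 values cover a 3x5 weight matrix (assumed 3 inputs x 5 neurons)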
                mock
                .Setup(q => q.GenerateRandom(It.IsAny<double>()))
                .Returns(queue.Dequeue);

                _layer = new FullyConnectedLayer(
                    new LogisticActivator(), 3, 5, 1, mock.Object, LearningRateAnnealers.LearningRateAnnealerType.Adagrad);
            }
Example #30
    List<string> weightsToSave = new List<string>(); //all the weights we'll be saving in string format

    //creates the network layers from the given layer sizes
    public void CreateNetwork(int[] layer)
    {
        //deep copy layers
        this.layer = new int[layer.Length];
        for (int i = 0; i < layer.Length; i++)
        {
            this.layer[i] = layer[i];
        }

        //creates neural layers
        layers = new FullyConnectedLayer[layer.Length - 1];

        for (int i = 0; i < layers.Length; i++)
        {
            layers[i] = new FullyConnectedLayer(layer[i], layer[i + 1]);
        }
    }
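
A hedged usage sketch: consecutive size entries define each layer, so three sizes yield two fully connected layers (3->5 and 5->2):

    // hypothetical call on an instance of the containing class
    network.CreateNetwork(new int[] { 3, 5, 2 });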
Example #31
        public void ArchitectureConstructorTest1()
        {
            FullyConnectedLayer layer = new FullyConnectedLayer(new Shape(new int[] { -1, 10, 12, 3 }), "100N", null);

            Assert.AreEqual(100, layer.NumberOfNeurons);
            Assert.AreEqual("100N", layer.Architecture);

            CollectionAssert.AreEqual(new[] { -1, 100 }, layer.OutputShape.Axes);
            Assert.AreEqual(1, layer.NumberOfOutputs);
            Assert.AreEqual(MatrixLayout.RowMajor, layer.MatrixLayout);

            CollectionAssert.AreEqual(new[] { 100, 10 * 12 * 3 }, layer.W.Axes);
            Assert.IsFalse(layer.W.Weights.Take(layer.W.Length).All(x => x == 0.0f));
            Assert.AreEqual(0.0, layer.W.Weights.Take(layer.W.Length).Average(), 0.01f);

            CollectionAssert.AreEqual(new[] { 100 }, layer.B.Axes);
            Assert.IsTrue(layer.B.Weights.Take(layer.B.Length).All(x => x == 0.0f));
        }
Example #32
        public void SerializeTest1()
        {
            NetworkGraph graph = new NetworkGraph();

            Layer layer1 = new FullyConnectedLayer(new Shape(new[] { 1, 2, 3, 1 }), 5, MatrixLayout.ColumnMajor, null);
            Layer layer2 = new FullyConnectedLayer(new Shape(new[] { 2, 3, 4, 1 }), 6, MatrixLayout.ColumnMajor, null);

            Edge<Layer> edge1 = new Edge<Layer>(layer1, layer2);
            Edge<Layer> edge2 = new Edge<Layer>(layer1, layer2);

            graph.AddEdges(new Edge<Layer>[] { edge1, edge2 });

            string s1 = graph.SaveToString();

            NetworkGraph graph2 = NetworkGraph.FromString(s1);

            string s2 = graph2.SaveToString();

            Assert.AreEqual(s1, s2);
        }
Example #33
        public static NeuralNetwork<Vector> Create(IDataSet<Vector, Vector> dataSet, IActivator activator, List<int> hiddenSizes)
        {
            if (hiddenSizes.Count == 0)
                return Create(dataSet, activator);

            var inputSize  = dataSet.FirstInput.Size;
            var outputSize = dataSet.FirstOutput.Size;
            var sizes = new List<int>{inputSize};
            sizes.AddRange(hiddenSizes);
            sizes.Add(outputSize);
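            // sizes = [input, hidden..., output]; each consecutive pair defines one layer's in/out widths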

            var layerCount = sizes.Count - 1;
            var layers = new ISingleLayer<Vector, Vector>[layerCount];

            for (var i = 0; i < layerCount; ++i)
                layers[i] = new FullyConnectedLayer(sizes[i], sizes[i + 1], activator);

            var compositeLayer = LayerCompositor.ComposeGeteroneneous(layers);

            return new NeuralNetwork<Vector>(compositeLayer);
        }
Example #34
        public static NeuralNetwork<Matrix> CreateSemi(IDataSet<Matrix, Vector> dataSet)
        {
            var count  = 5;

            var a = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                a[i] = new MatrixConvolutor(28, 28, 24, 24, new Tanh());

            var b = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                b[i] = new MatrixSubsampler(24, 24, 12, 12, new Tanh());

            var splitter    = new Splitter<Matrix, Matrix>(a);
            var merger      = new MatrixMerger<Matrix>(b);

            var classif  = new FullyConnectedLayer(144 * count, 50, new Tanh());
            var classif2 = new FullyConnectedLayer(50, 10, new Tanh()); // takes classif's 50 outputs as input

            var comp = CompositeLayer<Vector, Vector[], Vector>.Compose(splitter,
                                                                        merger,
                                                                        classif,
                                                                        classif2);

            return new NeuralNetwork<Matrix>(comp);
        }
Example #35
        public static NeuralNetwork<Matrix> CreateNorb(IDataSet<Matrix, Vector> dataSet)
        {
            var count  = 12;
            var branch = 5;

            var a = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                a[i] = new MatrixConvolutor(96, 96, 92, 92, new Tanh());

            var b = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                b[i] = new MatrixSubsampler(92, 92, 46, 46, new Tanh());

            var c = new ISingleLayer<Matrix, Matrix>[count];
            for (var i = 0; i < count; ++i)
                c[i] = new MatrixSubsampler(46, 46, 23, 23, new Tanh());

            var splitter    = new Splitter<Matrix, Matrix>(a);
            var applicator1 = new Applicator<Matrix, Matrix>(b);
            var merger      = new MatrixMerger<Matrix>(c);

            var classif  = new FullyConnectedLayer(23 * 23 * count, 5, new Tanh());

            var comp = CompositeLayer<Vector, Vector[], Vector>.Compose(splitter,
                                                                        applicator1,
                                                                        merger,
                                                                        classif
                                                                       );

            return new NeuralNetwork<Matrix>(comp);
        }