Example #1
        private static ITrainer CreateIrisTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Iris();

            ITrainer trainer = sigma.CreateTrainer("iris-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(4)
                                           + FullyConnectedLayer.Construct(12)
                                           + FullyConnectedLayer.Construct(3)
                                           + OutputLayer.Construct(3)
                                           + SquaredDifferenceCostLayer.Construct();
            //trainer.Network = Serialisation.ReadBinaryFileIfExists("iris.sgnet", trainer.Network);

            trainer.TrainingDataIterator = new MinibatchIterator(50, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.06);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            //trainer.AddGlobalHook(new StopTrainingHook(atEpoch: 100));
            //trainer.AddLocalHook(new EarlyStopperHook("optimiser.cost_total", 20, target: ExtremaTarget.Min));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));
            //.On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));
            //trainer.AddLocalHook(new DiskSaviorHook<INetwork>("network.self", Namers.Dynamic("iris_epoch{0}.sgnet", "epoch"), verbose: true)
            //    .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));

            return trainer;
        }
Example #2
        /// <summary>
        /// Create an MNIST trainer (handwritten digit recognition) that will be added to an environment.
        /// </summary>
        /// <param name="sigma">The sigma environment this trainer will be assigned to.</param>
        /// <returns>The newly created trainer.</returns>
        private static ITrainer CreateMnistTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(28, 28)
                                           + DropoutLayer.Construct(0.2)
                                           + FullyConnectedLayer.Construct(1000, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(800, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                                           + OutputLayer.Construct(10)
                                           + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(100, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));

            return trainer;
        }
Example #3
        private static ITrainer CreateParkinsonsTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateTrainer("parkinsons-trainer");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(22)
                               + FullyConnectedLayer.Construct(140)
                               + FullyConnectedLayer.Construct(20)
                               + FullyConnectedLayer.Construct(1)
                               + OutputLayer.Construct(1)
                               + SquaredDifferenceCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(10, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);
            trainer.Operator  = new CpuSinglethreadedOperator(new DebugHandler(new CpuFloat32Handler()));

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            return trainer;
        }
Example #4
        public static ITrainer CreateTicTacToeTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.TicTacToe();

            ITrainer trainer = sigma.CreateTrainer("tictactoe-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(9)
                                           + FullyConnectedLayer.Construct(72, "tanh")
                                           + FullyConnectedLayer.Construct(99, "tanh")
                                           + FullyConnectedLayer.Construct(3, "tanh")
                                           + OutputLayer.Construct(3)
                                           + SoftMaxCrossEntropyCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(21, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new MomentumGradientOptimiser(learningRate: 0.01, momentum: 0.9);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: new[] { 1, 2 }));

            trainer.AddGlobalHook(new DiskSaviorHook <INetwork>(TimeStep.Every(1, TimeScale.Epoch), "network.self", Namers.Static("tictactoe.sgnet"), verbose: true)
                                  .On(new ExtremaCriteria("shared.classification_accuracy_top1", ExtremaTarget.Max)));

            return trainer;
        }
Example #5
        public void TestUnifiedIteratorYield()
        {
            string filename = ".unittestfile" + nameof(TestUnifiedIteratorYield);

            CreateCsvTempFile(filename);
            SigmaEnvironment.Clear();


            FileSource         source    = new FileSource(filename, Path.GetTempPath());
            CsvRecordExtractor extractor = (CsvRecordExtractor) new CsvRecordReader(source).Extractor(new CsvRecordExtractor(new Dictionary <string, int[][]> {
                ["inputs"] = new[] { new[] { 0 } }
            }));
            ExtractedDataset    dataset  = new ExtractedDataset("test", 2, new DiskCacheProvider(Path.GetTempPath() + "/" + nameof(TestUnifiedIteratorYield)), true, extractor);
            UnifiedIterator     iterator = new UnifiedIterator(dataset);
            SigmaEnvironment    sigma    = SigmaEnvironment.Create("test");
            IComputationHandler handler  = new CpuFloat32Handler();

            foreach (var block in iterator.Yield(handler, sigma))
            {
                Assert.AreEqual(new[] { 5.1f, 4.9f, 4.7f }, block["inputs"].GetDataAs <float>().GetValuesArrayAs <float>(0, 3));
            }

            dataset.Dispose();

            DeleteTempFile(filename);
        }
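Note: the CreateCsvTempFile and DeleteTempFile helpers referenced here (and again in Examples #25 and #26) are not part of this listing. Judging from the assertions, the temporary CSV's first column must start with 5.1, 4.9 and 4.7; a minimal sketch of such fixtures, assuming iris-style rows and System.IO in scope, could look like this (names match the calls above, contents are an assumption):

        // Hypothetical test fixtures, inferred from the assertions above; the exact
        // file contents in the original test suite may differ.
        private static void CreateCsvTempFile(string filename)
        {
            string path = Path.Combine(Path.GetTempPath(), filename);

            // the first column supplies the "inputs" values the tests expect (5.1, 4.9, 4.7)
            File.WriteAllLines(path, new[]
            {
                "5.1,3.5,1.4,0.2,Iris-setosa",
                "4.9,3.0,1.4,0.2,Iris-setosa",
                "4.7,3.2,1.3,0.2,Iris-setosa"
            });
        }

        private static void DeleteTempFile(string filename)
        {
            File.Delete(Path.Combine(Path.GetTempPath(), filename));
        }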
Example #6
        private static void SampleParkinsons()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("parkinsons");

            IDataset dataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateGhostTrainer("parkinsons-trainer");

            trainer.Network.Architecture = InputLayer.Construct(22)
                                           + FullyConnectedLayer.Construct(140)
                                           + FullyConnectedLayer.Construct(20)
                                           + FullyConnectedLayer.Construct(1)
                                           + OutputLayer.Construct(1)
                                           + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(10, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            sigma.AddTrainer(trainer);

            sigma.PrepareAndRun();
        }
Example #7
        private static void SampleWdbc()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("wdbc");

            IDataset dataset = Defaults.Datasets.Wdbc();

            ITrainer trainer = sigma.CreateGhostTrainer("wdbc-trainer");

            trainer.Network.Architecture = InputLayer.Construct(30)
                                           + FullyConnectedLayer.Construct(42)
                                           + FullyConnectedLayer.Construct(24)
                                           + FullyConnectedLayer.Construct(1)
                                           + OutputLayer.Construct(1)
                                           + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(72, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.005);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            sigma.AddTrainer(trainer);

            sigma.AddMonitor(new HttpMonitor("http://+:8080/sigma/"));

            sigma.PrepareAndRun();
        }
Example #8
        /// <summary>
        /// Create an MNIST trainer (handwritten digit recognition) that will be added to an environment.
        /// </summary>
        /// <param name="sigma">The sigma environment this trainer will be assigned to.</param>
        /// <returns>The newly created trainer.</returns>
        private static ITrainer CreateMnistTrainer(SigmaEnvironment sigma)
        {
            ByteRecordReader mnistImageReader    = new ByteRecordReader(headerLengthBytes: 16, recordSizeBytes: 28 * 28, source: new CompressedSource(new MultiSource(new FileSource("train-images-idx3-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"))));
            IRecordExtractor mnistImageExtractor = mnistImageReader.Extractor("inputs", new[] { 0L, 0L }, new[] { 28L, 28L }).Preprocess(new NormalisingPreprocessor(0, 255));

            ByteRecordReader mnistTargetReader    = new ByteRecordReader(headerLengthBytes: 8, recordSizeBytes: 1, source: new CompressedSource(new MultiSource(new FileSource("train-labels-idx1-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz"))));
            IRecordExtractor mnistTargetExtractor = mnistTargetReader.Extractor("targets", new[] { 0L }, new[] { 1L }).Preprocess(new OneHotPreprocessor(minValue: 0, maxValue: 9));

            IDataset dataset = new Dataset("mnist-training", Dataset.BlockSizeAuto, mnistImageExtractor, mnistTargetExtractor);
            ITrainer trainer = sigma.CreateTrainer("test");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(28, 28)
                               + 2 * FullyConnectedLayer.Construct(28 * 28)
                               + FullyConnectedLayer.Construct(10)
                               + OutputLayer.Construct(10)
                               + SoftMaxCrossEntropyCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(8, dataset);
            trainer.Optimiser            = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator             = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.05f));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01f, mean: 0.03f));

            trainer.AddGlobalHook(new CurrentEpochIterationReporter(TimeStep.Every(1, TimeScale.Iteration)));

            return trainer;
        }
Example #9
        private static void SampleHutter()
        {
            const long timeWindowSize = 10L;

            SigmaEnvironment sigma = SigmaEnvironment.Create("recurrent");

            IDataSource      source    = new MultiSource(new FileSource("enwik8"), new CompressedSource(new MultiSource(new FileSource("enwik8.zip"), new UrlSource("http://mattmahoney.net/dc/enwik8.zip"))));
            IRecordExtractor extractor = new CharacterRecordReader(source, (int)(timeWindowSize + 1), Encoding.ASCII)
                                         .Extractor(new ArrayRecordExtractor <short>(ArrayRecordExtractor <short>
                                                                                     .ParseExtractorParameters("inputs", new[] { 0L }, new[] { timeWindowSize }, "targets", new[] { 0L }, new[] { timeWindowSize }))
                                                    .Offset("targets", 1L))
                                         .Preprocess(new PermutePreprocessor(0, 2, 1))
                                         .Preprocess(new OneHotPreprocessor(0, 255));
            IDataset dataset = new ExtractedDataset("hutter", ExtractedDataset.BlockSizeAuto, false, extractor);

            ITrainer trainer = sigma.CreateTrainer("hutter");

            trainer.Network.Architecture = InputLayer.Construct(256) + RecurrentLayer.Construct(256) + OutputLayer.Construct(256) + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(32, dataset);
            trainer.AddNamedDataIterator("validation", new MinibatchIterator(100, dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.07);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), averageValues: true));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(10, TimeScale.Iteration)));

            sigma.PrepareAndRun();
        }
Example #10
        private static void SampleXor()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("logical");

            sigma.SetRandomSeed(0);
            sigma.Prepare();

            RawDataset dataset = new RawDataset("xor");

            dataset.AddRecords("inputs", new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 });
            dataset.AddRecords("targets", new[] { 0 }, new[] { 0 }, new[] { 0 }, new[] { 1 });

            ITrainer trainer = sigma.CreateTrainer("xor-trainer");

            trainer.Network.Architecture = InputLayer.Construct(2) + FullyConnectedLayer.Construct(2) + FullyConnectedLayer.Construct(1) + OutputLayer.Construct(1) + SquaredDifferenceCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(1, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.1);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new StopTrainingHook(atEpoch: 10000));
            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), averageValues: true));
            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Stop), averageValues: true));
            trainer.AddLocalHook(new ValueReporter("network.layers.*<external_output>._outputs.default.activations", TimeStep.Every(1, TimeScale.Stop)));
            trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.weights", TimeStep.Every(1, TimeScale.Stop)));
            trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.biases", TimeStep.Every(1, TimeScale.Stop)));

            sigma.Run();
        }
Example #11
        public void TestSigmaEnvironmentCreate()
        {
            SigmaEnvironment.Clear();

            SigmaEnvironment sigma = SigmaEnvironment.Create("test");

            Assert.AreEqual("test", sigma.Name);
        }
Example #12
        public void TestSigmaEnvironmentAlreadyCreated()
        {
            SigmaEnvironment.Clear();

            SigmaEnvironment.Create("test");

            Assert.Throws <ArgumentException>(() => SigmaEnvironment.Create("test"));
        }
Example #13
        private static void Main()
        {
            SigmaEnvironment.EnableLogging();
            SigmaEnvironment sigma = SigmaEnvironment.Create("Sigma-IRIS");


            ITrainer trainer = CreateIrisTrainer(sigma);
        }
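As written, this Main creates the IRIS trainer but never starts training. Judging from Examples #23 and #24, CreateTrainer appears to register the trainer with the environment (the ghost-trainer samples use AddTrainer instead), so a completed version would presumably look roughly like this sketch:

        private static void Main()
        {
            SigmaEnvironment.EnableLogging();
            SigmaEnvironment sigma = SigmaEnvironment.Create("Sigma-IRIS");

            ITrainer trainer = CreateIrisTrainer(sigma);

            // sketch only: prepare the environment and start training,
            // mirroring the Prepare()/Run() sequence used in Example #24
            sigma.Prepare();
            sigma.Run();
        }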
Example #14
        private static void Main(string[] args)
        {
            SigmaEnvironment.EnableLogging(xml: true);
            SigmaEnvironment.Globals["web_proxy"] = WebUtils.GetProxyFromFileOrDefault(".customproxy");

            SampleMnist();

            Console.WriteLine("Program ended, waiting for termination, press any key...");
            Console.ReadKey();
        }
Example #15
        /// <summary>
        /// Default constructor for <see cref="ISynchronisationHandler"/>.
        /// </summary>
        /// <param name="sigma">The <see cref="SigmaEnvironment"/> this <see cref="ISynchronisationHandler"/> is
        /// associated with. May not be <c>null</c>.</param>
        public SynchronisationHandler(SigmaEnvironment sigma)
        {
            if (sigma == null)
            {
                throw new ArgumentNullException(nameof(sigma));
            }

            RegistryResolvers = new Dictionary <IRegistry, IRegistryResolver>();
            Sources           = new List <ISynchronisationSource>();

            Sigma = sigma;
        }
Example #16
        protected void CheckNotNull(IComputationHandler handler, SigmaEnvironment environment)
        {
            if (handler == null)
            {
                throw new ArgumentNullException(nameof(handler));
            }

            if (environment == null)
            {
                throw new ArgumentNullException(nameof(environment));
            }
        }
Example #17
        protected MockTrainer(string name) : base(name)
        {
            Network = new Network("test");
            Network.Architecture = new LinearNetworkArchitecture(MockLayer.Construct());
            Optimiser            = new GradientDescentOptimiser(0.0);
            IRecordExtractor extractor = new MockRecordExtractor();

            extractor.SectionNames = new[] { "targets", "inputs" };
            extractor.Reader       = new MockRecordReader();
            Sigma = SigmaEnvironment.GetOrCreate("testificate-mocktrainer");
            TrainingDataIterator = new UndividedIterator(new ExtractedDataset("testificate", extractor));
        }
Example #18
        private static void SampleLoadExtractIterate()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("test");

            sigma.Prepare();

            //var irisReader = new CsvRecordReader(new MultiSource(new FileSource("iris.data"), new UrlSource("http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data")));
            //IRecordExtractor irisExtractor = irisReader.Extractor("inputs2", new[] { 0, 3 }, "targets2", 4).AddValueMapping(4, "Iris-setosa", "Iris-versicolor", "Iris-virginica");
            //irisExtractor = irisExtractor.Preprocess(new OneHotPreprocessor(sectionName: "targets2", minValue: 0, maxValue: 2), new NormalisingPreprocessor(sectionNames: "inputs2", minInputValue: 0, maxInputValue: 6));

            ByteRecordReader mnistImageReader    = new ByteRecordReader(headerLengthBytes: 16, recordSizeBytes: 28 * 28, source: new CompressedSource(new MultiSource(new FileSource("train-images-idx3-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"))));
            IRecordExtractor mnistImageExtractor = mnistImageReader.Extractor("inputs", new[] { 0L, 0L }, new[] { 28L, 28L }).Preprocess(new NormalisingPreprocessor(0, 255));

            ByteRecordReader mnistTargetReader    = new ByteRecordReader(headerLengthBytes: 8, recordSizeBytes: 1, source: new CompressedSource(new MultiSource(new FileSource("train-labels-idx1-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz"))));
            IRecordExtractor mnistTargetExtractor = mnistTargetReader.Extractor("targets", new[] { 0L }, new[] { 1L }).Preprocess(new OneHotPreprocessor(minValue: 0, maxValue: 9));

            IComputationHandler handler = new CpuFloat32Handler();

            ExtractedDataset dataset = new ExtractedDataset("mnist-training", ExtractedDataset.BlockSizeAuto, mnistImageExtractor, mnistTargetExtractor);

            IDataset[] slices         = dataset.SplitRecordwise(0.8, 0.2);
            IDataset   trainingData   = slices[0];
            IDataset   validationData = slices[1];

            MinibatchIterator trainingIterator   = new MinibatchIterator(1, trainingData);
            MinibatchIterator validationIterator = new MinibatchIterator(1, validationData);

            while (true)
            {
                foreach (var block in trainingIterator.Yield(handler, sigma))
                {
                    Thread.Sleep(100);

                    PrintFormattedBlock(block, PrintUtils.AsciiGreyscalePalette);

                    Thread.Sleep(1000);
                }
            }

            //Random random = new Random();
            //INDArray array = new ADNDArray<float>(3, 1, 2, 2);

            //new GaussianInitialiser(0.05, 0.05).Initialise(array, Handler, random);

            //Console.WriteLine(array);

            //new ConstantValueInitialiser(1).Initialise(array, Handler, random);

            //Console.WriteLine(array);

            //dataset.InvalidateAndClearCaches();
        }
Example #19
        public void TestWPFMonitorCreation()
        {
            SigmaEnvironment sigma = ClearAndCreate("Test");

            WPFMonitor monitor = sigma.AddMonitor(new WPFMonitor("Sigma GUI Demo"));

            monitor.Priority = ThreadPriority.Lowest;

            Assert.AreSame(sigma, monitor.Sigma);
            Assert.AreEqual(monitor.Priority, ThreadPriority.Lowest);
            Assert.AreEqual(monitor.Title, "Sigma GUI Demo");
            Assert.AreNotEqual(monitor.Title, "Sigma GUI Demo2");
        }
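The ClearAndCreate helper used above is not shown in this listing; going by the pattern in Examples #11 and #12, it presumably resets any existing environment before creating a fresh one. A minimal sketch, with the name and signature assumed from the call site:

        // Hypothetical helper, inferred from Examples #11/#12: clear global
        // environment state, then create a new environment with the given name.
        private static SigmaEnvironment ClearAndCreate(string name)
        {
            SigmaEnvironment.Clear();

            return SigmaEnvironment.Create(name);
        }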
Example #20
        private static void SampleNetworkArchitecture()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("test");

            IComputationHandler handler = new CpuFloat32Handler();
            ITrainer            trainer = sigma.CreateTrainer("test_trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(2, 2) +
                                           ElementwiseLayer.Construct(2 * 2) +
                                           FullyConnectedLayer.Construct(2) +
                                           2 * (FullyConnectedLayer.Construct(4) + FullyConnectedLayer.Construct(2)) +
                                           OutputLayer.Construct(2);
            trainer.Network = (INetwork)trainer.Network.DeepCopy();

            trainer.Operator = new CpuMultithreadedOperator(10);

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1f));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01f, mean: 0.03f));
            trainer.Initialise(handler);

            trainer.Network = (INetwork)trainer.Network.DeepCopy();

            Console.WriteLine(trainer.Network.Registry);

            IRegistryResolver resolver = new RegistryResolver(trainer.Network.Registry);

            Console.WriteLine("===============");
            object[] weights = resolver.ResolveGet <object>("layers.*.weights");
            Console.WriteLine(string.Join("\n", weights));
            Console.WriteLine("===============");



            //foreach (ILayerBuffer buffer in trainer.Network.YieldLayerBuffersOrdered())
            //{
            //      Console.WriteLine(buffer.Layer.Name + ": ");

            //      Console.WriteLine("inputs:");
            //      foreach (string input in buffer.Inputs.Keys)
            //      {
            //              Console.WriteLine($"\t{input}: {buffer.Inputs[input].GetHashCode()}");
            //      }

            //      Console.WriteLine("outputs:");
            //      foreach (string output in buffer.Outputs.Keys)
            //      {
            //              Console.WriteLine($"\t{output}: {buffer.Outputs[output].GetHashCode()}");
            //      }
            //}
        }
Example #21
        private static CpuMultithreadedOperator CreateOperator()
        {
            SigmaEnvironment.Clear();
            RedirectGlobalsToTempPath();

            CpuMultithreadedOperator @operator = new CpuMultithreadedOperator(new CpuFloat32Handler(), 3, ThreadPriority.Normal);

            @operator.Trainer = new MockTrainer();
            @operator.Trainer.Initialise(@operator.Handler);
            @operator.Network = @operator.Trainer.Network;
            @operator.Sigma   = SigmaEnvironment.GetOrCreate("testificate-operatorcreate");

            return @operator;
        }
Example #22
        private static void SampleIris()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("iris");

            sigma.SetRandomSeed(0);

            sigma.Prepare();

            IDataset dataset = Defaults.Datasets.Iris();

            ITrainer trainer = sigma.CreateGhostTrainer("iris-trainer");

            trainer.Network.Architecture = InputLayer.Construct(4)
                                           + FullyConnectedLayer.Construct(12)
                                           + FullyConnectedLayer.Construct(3)
                                           + OutputLayer.Construct(3)
                                           + SquaredDifferenceCostLayer.Construct();
            //trainer.Network = Serialisation.ReadBinaryFileIfExists("iris.sgnet", trainer.Network);

            trainer.TrainingDataIterator = new MinibatchIterator(50, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.06);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            //trainer.AddGlobalHook(new StopTrainingHook(atEpoch: 100));
            //trainer.AddLocalHook(new EarlyStopperHook("optimiser.cost_total", 20, target: ExtremaTarget.Min));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));
            //.On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));
            //trainer.AddLocalHook(new DiskSaviorHook<INetwork>("network.self", Namers.Dynamic("iris_epoch{0}.sgnet", "epoch"), verbose: true)
            //    .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));
            trainer.AddHook(new StopTrainingHook(new ThresholdCriteria("shared.classification_accuracy_top1", ComparisonTarget.GreaterThanEquals, 0.98)));

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(599, TimeScale.Iteration), 128));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));

            //Serialisation.WriteBinaryFile(trainer, "trainer.sgtrainer");
            //trainer = Serialisation.ReadBinaryFile<ITrainer>("trainer.sgtrainer");

            sigma.AddTrainer(trainer);

            sigma.AddMonitor(new HttpMonitor("http://+:8080/sigma/"));

            sigma.PrepareAndRun();
        }
Example #23
        private static void SampleMnist()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("mnist");

            sigma.SetRandomSeed(0);

            IDataset dataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(28, 28)
                                           + DropoutLayer.Construct(0.2)
                                           + FullyConnectedLayer.Construct(1000, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(800, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                                           + OutputLayer.Construct(10)
                                           + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(100, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            //trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.01);
            //trainer.Optimiser = new MomentumGradientOptimiser(learningRate: 0.01, momentum: 0.9);
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            var validationTimeStep = TimeStep.Every(1, TimeScale.Epoch);

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", validationTimeStep, tops: new[] { 1, 2, 3 }));

            for (int i = 0; i < 10; i++)
            {
                trainer.AddGlobalHook(new TargetMaximisationReporter(trainer.Operator.Handler.NDArray(ArrayUtils.OneHot(i, 10), 10), TimeStep.Every(1, TimeScale.Epoch)));
            }

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(10, TimeScale.Iteration), 32));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));
            trainer.AddHook(new StopTrainingHook(atEpoch: 10));

            sigma.PrepareAndRun();
        }
Example #24
        private static void Main()
        {
            SigmaEnvironment.EnableLogging();
            SigmaEnvironment sigma = SigmaEnvironment.Create("Sigma-MNIST");

            // create a new mnist trainer
            ITrainer trainer = CreateMnistTrainer(sigma);

            // for the UI we have to activate more features
            if (UI)
            {
                // create and attach a new UI framework
                WPFMonitor gui = sigma.AddMonitor(new WPFMonitor("MNIST"));

                // create a tab
                gui.AddTabs("Overview");

                // access the window inside the ui thread
                gui.WindowDispatcher(window =>
                {
                    // enable initialisation
                    window.IsInitializing = true;

                    // add a panel that controls the learning process
                    window.TabControl["Overview"].AddCumulativePanel(new ControlPanel("Control", trainer));

                    // create a chart of the total cost that updates every iteration
                    var cost = new TrainerChartPanel <CartesianChart, LineSeries, TickChartValues <double>, double>("Cost", trainer, "optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration));
                    // improve the chart performance
                    cost.Fast();

                    // add the newly created panel
                    window.TabControl["Overview"].AddCumulativePanel(cost);

                    // finish initialisation
                    window.IsInitializing = false;
                });

                // the operators should not run instantly but when the user clicks play
                sigma.StartOperatorsOnRun = false;
            }

            sigma.Prepare();

            sigma.Run();
        }
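The UI flag checked at the start of this Main is not included in the listing; it is presumably just a compile-time switch on the program class, along the lines of:

        // Hypothetical field, not shown in the listing: toggles whether the WPF
        // monitor and its panels are created before training starts.
        private const bool UI = true;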
Example #25
        public void TestMinibatchIteratorCreate()
        {
            string filename = ".unittestfile" + nameof(TestMinibatchIteratorCreate);

            CreateCsvTempFile(filename);
            SigmaEnvironment.Clear();

            FileSource         source    = new FileSource(filename, Path.GetTempPath());
            CsvRecordExtractor extractor = (CsvRecordExtractor) new CsvRecordReader(source).Extractor(new CsvRecordExtractor(new Dictionary <string, int[][]> {
                ["inputs"] = new[] { new[] { 0 } }
            }));
            ExtractedDataset dataset = new ExtractedDataset("test", 1, new DiskCacheProvider(Path.GetTempPath() + "/" + nameof(TestMinibatchIteratorYield)), true, extractor);

            Assert.Throws <ArgumentException>(() => new MinibatchIterator(-3, dataset));
            Assert.Throws <ArgumentNullException>(() => new MinibatchIterator(1, null));

            dataset.Dispose();

            DeleteTempFile(filename);
        }
Example #26
        public void TestUndividedIteratorYield()
        {
            string filename = ".unittestfile" + nameof(TestUndividedIteratorCreate);

            CreateCsvTempFile(filename);

            SigmaEnvironment.Clear();

            FileSource         source    = new FileSource(filename, Path.GetTempPath());
            CsvRecordExtractor extractor = (CsvRecordExtractor) new CsvRecordReader(source).Extractor(new CsvRecordExtractor(new Dictionary <string, int[][]> {
                ["inputs"] = new[] { new[] { 0 } }
            }));
            ExtractedDataset    dataset  = new ExtractedDataset("test", 2, new DiskCacheProvider(Path.GetTempPath() + "/" + nameof(TestUndividedIteratorCreate)), true, extractor);
            UndividedIterator   iterator = new UndividedIterator(dataset);
            SigmaEnvironment    sigma    = SigmaEnvironment.Create("test");
            IComputationHandler handler  = new CpuFloat32Handler();

            int index = 0;

            foreach (var block in iterator.Yield(handler, sigma))
            {
                if (index == 0)
                {
                    Assert.AreEqual(new float[] { 5.1f, 4.9f }, block["inputs"].GetDataAs <float>().GetValuesArrayAs <float>(0, 2));
                }
                else if (index == 1)
                {
                    Assert.AreEqual(new float[] { 4.7f }, block["inputs"].GetDataAs <float>().GetValuesArrayAs <float>(0, 1));
                }
                else
                {
                    Assert.Fail("There can be a maximum of two iterations, but this is yield iteration 3 (index 2).");
                }

                index++;
            }

            dataset.Dispose();

            DeleteTempFile(filename);
        }
Example #27
        private static void SampleCachedFastIteration()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("test");

            IDataSource dataSource = new CompressedSource(new MultiSource(new FileSource("train-images-idx3-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz")));

            ByteRecordReader mnistImageReader    = new ByteRecordReader(headerLengthBytes: 16, recordSizeBytes: 28 * 28, source: dataSource);
            IRecordExtractor mnistImageExtractor = mnistImageReader.Extractor("inputs", new[] { 0L, 0L }, new[] { 28L, 28L }).Preprocess(new NormalisingPreprocessor(0, 255));

            IDataset dataset = new ExtractedDataset("mnist-training", ExtractedDataset.BlockSizeAuto, mnistImageExtractor);

            IDataset[] slices       = dataset.SplitRecordwise(0.8, 0.2);
            IDataset   trainingData = slices[0];

            Stopwatch stopwatch = Stopwatch.StartNew();

            IDataIterator iterator = new MinibatchIterator(10, trainingData);

            foreach (var block in iterator.Yield(new CpuFloat32Handler(), sigma))
            {
                //PrintFormattedBlock(block, PrintUtils.AsciiGreyscalePalette);
            }

            Console.Write("\nFirst iteration took " + stopwatch.Elapsed + "\n+=+ Iterating over dataset again +=+ Dramatic pause...");

            ArrayUtils.Range(1, 10).ToList().ForEach(i =>
            {
                Thread.Sleep(500);
                Console.Write(".");
            });

            stopwatch.Restart();

            foreach (var block in iterator.Yield(new CpuFloat32Handler(), sigma))
            {
                //PrintFormattedBlock(block, PrintUtils.AsciiGreyscalePalette);
            }

            Console.WriteLine("Second iteration took " + stopwatch.Elapsed);
        }
Example #28
        /// <summary>
        /// Create an IRIS trainer that observes the current epoch and iteration.
        /// </summary>
        /// <param name="sigma">The sigma environment.</param>
        /// <returns>The newly created trainer that can be added to the environment.</returns>
        private static ITrainer CreateIrisTrainer(SigmaEnvironment sigma)
        {
            CsvRecordReader  irisReader    = new CsvRecordReader(new MultiSource(new FileSource("iris.data"), new UrlSource("http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data")));
            IRecordExtractor irisExtractor = irisReader.Extractor("inputs", new[] { 0, 3 }, "targets", 4).AddValueMapping(4, "Iris-setosa", "Iris-versicolor", "Iris-virginica");

            irisExtractor = irisExtractor.Preprocess(new OneHotPreprocessor(sectionName: "targets", minValue: 0, maxValue: 2));
            irisExtractor = irisExtractor.Preprocess(new PerIndexNormalisingPreprocessor(0, 1, "inputs", 0, 4.3, 7.9, 1, 2.0, 4.4, 2, 1.0, 6.9, 3, 0.1, 2.5));

            Dataset  dataset           = new Dataset("iris", Dataset.BlockSizeAuto, irisExtractor);
            IDataset trainingDataset   = dataset;
            IDataset validationDataset = dataset;

            ITrainer trainer = sigma.CreateTrainer("test");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(4)
                               + FullyConnectedLayer.Construct(10)
                               + FullyConnectedLayer.Construct(20)
                               + FullyConnectedLayer.Construct(10)
                               + FullyConnectedLayer.Construct(3)
                               + OutputLayer.Construct(3)
                               + SquaredDifferenceCostLayer.Construct()
            };
            trainer.TrainingDataIterator = new MinibatchIterator(4, trainingDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(validationDataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.002);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.4));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01, mean: 0.05));

            trainer.AddHook(new ValueReporterHook("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new ValidationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));
            trainer.AddLocalHook(new CurrentEpochIterationReporter(TimeStep.Every(1, TimeScale.Epoch)));

            return trainer;
        }
Example #29
        private static ITrainer CreateXorTrainer(SigmaEnvironment sigma)
        {
            RawDataset dataset = new RawDataset("xor");

            dataset.AddRecords("inputs", new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 });
            dataset.AddRecords("targets", new[] { 0 }, new[] { 1 }, new[] { 1 }, new[] { 0 });

            ITrainer trainer = sigma.CreateTrainer("xor-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(2) + FullyConnectedLayer.Construct(1) + OutputLayer.Construct(1) + SquaredDifferenceCostLayer.Construct();
            trainer.TrainingDataIterator = new UndividedIterator(dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Operator  = new CpuSinglethreadedOperator();
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.01);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));
            trainer.AddLocalHook(new ValueReporter("network.layers.1-fullyconnected._outputs.default.activations", TimeStep.Every(1, TimeScale.Epoch)));

            return trainer;
        }
Example #30
        private static void SampleNetworkMerging()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("merge_test");

            ITrainer[] trainers       = new ITrainer[3];
            int[]      constantValues = { 2, 10, 70 };

            //INetworkMerger merger = new WeightedNetworkMerger(10d, 10d, 1d);
            INetworkMerger      merger  = new AverageNetworkMerger();
            IComputationHandler handler = new CpuFloat32Handler();

            for (int i = 0; i < trainers.Length; i++)
            {
                trainers[i]         = sigma.CreateTrainer($"MergeTrainer{i}");
                trainers[i].Network = new Network($"{i}");
                trainers[i].Network.Architecture = InputLayer.Construct(2, 2) + ElementwiseLayer.Construct(2 * 2) + OutputLayer.Construct(2);

                trainers[i].AddInitialiser("*.weights", new ConstantValueInitialiser(constantValues[i]));

                trainers[i].Operator = new CpuMultithreadedOperator(5);
                trainers[i].Initialise(handler);
            }

            foreach (ITrainer trainer in trainers)
            {
                Console.WriteLine(trainer.Network.Registry);
            }

            merger.AddMergeEntry("layers.*.weights");
            merger.Merge(trainers[1].Network, trainers[2].Network, handler);

            Console.WriteLine("*******************");
            foreach (ITrainer trainer in trainers)
            {
                Console.WriteLine(trainer.Network.Registry);
            }
        }