Beispiel #1
0
        /// <summary>
        /// Trains a small feed-forward network on the IRIS dataset using a ghost trainer
        /// on a CUDA operator, stopping at 98% top-1 validation accuracy, and exposes
        /// progress through an HTTP monitor.
        /// </summary>
        private static void SampleIris()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("iris");

            sigma.SetRandomSeed(0);
            sigma.Prepare();

            IDataset irisDataset = Defaults.Datasets.Iris();

            ITrainer trainer = sigma.CreateGhostTrainer("iris-trainer");

            // 4 inputs -> two hidden layers -> 3-class output with squared-difference cost
            trainer.Network.Architecture =
                InputLayer.Construct(4)
                + FullyConnectedLayer.Construct(12)
                + FullyConnectedLayer.Construct(3)
                + OutputLayer.Construct(3)
                + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(50, irisDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(irisDataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.06);
            trainer.Operator = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));
            // stop as soon as top-1 validation accuracy reaches 98%
            trainer.AddHook(new StopTrainingHook(new ThresholdCriteria("shared.classification_accuracy_top1", ComparisonTarget.GreaterThanEquals, 0.98)));

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(599, TimeScale.Iteration), 128));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));

            sigma.AddTrainer(trainer);
            sigma.AddMonitor(new HttpMonitor("http://+:8080/sigma/"));

            sigma.PrepareAndRun();
        }
Beispiel #2
0
        /// <summary>
        /// Trains a dropout-regularised fully connected network on MNIST with Adagrad
        /// on a CUDA operator for 10 epochs, reporting per-class target maximisations
        /// and top-1/2/3 validation accuracy every epoch.
        /// </summary>
        private static void SampleMnist()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("mnist");
            sigma.SetRandomSeed(0);

            IDataset mnistDataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            // NOTE(review): "rel" is presumably the framework's ReLU activation key — confirm against its activation registry.
            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(28, 28)
                               + DropoutLayer.Construct(0.2)
                               + FullyConnectedLayer.Construct(1000, activation: "rel")
                               + DropoutLayer.Construct(0.4)
                               + FullyConnectedLayer.Construct(800, activation: "rel")
                               + DropoutLayer.Construct(0.4)
                               + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                               + OutputLayer.Construct(10)
                               + SoftMaxCrossEntropyCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(100, mnistDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            // report the running cost whenever it reaches a new minimum
            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            var accuracyTimeStep = TimeStep.Every(1, TimeScale.Epoch);

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", accuracyTimeStep, tops: new[] { 1, 2, 3 }));

            // one target-maximisation report per digit class (one-hot target per class), once per epoch
            for (int digit = 0; digit < 10; digit++)
            {
                trainer.AddGlobalHook(new TargetMaximisationReporter(trainer.Operator.Handler.NDArray(ArrayUtils.OneHot(digit, 10), 10), TimeStep.Every(1, TimeScale.Epoch)));
            }

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(10, TimeScale.Iteration), 32));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));
            trainer.AddHook(new StopTrainingHook(atEpoch: 10));

            sigma.PrepareAndRun();
        }
Beispiel #3
0
        /// <summary>
        /// Creates a trainer for the tic-tac-toe dataset: a tanh feed-forward network
        /// trained with momentum gradient descent on a single-threaded CPU operator.
        /// The best network (by top-1 validation accuracy) is saved to disk each time it improves.
        /// </summary>
        /// <param name="sigma">The sigma environment to create the trainer in.</param>
        /// <returns>The configured trainer (not yet added to the environment).</returns>
        public static ITrainer CreateTicTacToeTrainer(SigmaEnvironment sigma)
        {
            IDataset ticTacToeDataset = Defaults.Datasets.TicTacToe();

            ITrainer trainer = sigma.CreateTrainer("tictactoe-trainer");

            trainer.Network = new Network
            {
                // 9 board cells in -> 3-class outcome out, softmax cross-entropy cost
                Architecture = InputLayer.Construct(9)
                               + FullyConnectedLayer.Construct(72, "tanh")
                               + FullyConnectedLayer.Construct(99, "tanh")
                               + FullyConnectedLayer.Construct(3, "tanh")
                               + OutputLayer.Construct(3)
                               + SoftMaxCrossEntropyCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(21, ticTacToeDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(ticTacToeDataset));
            trainer.Optimiser = new MomentumGradientOptimiser(learningRate: 0.01, momentum: 0.9);
            trainer.Operator = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: new[] { 1, 2 }));

            // persist the network whenever top-1 validation accuracy hits a new maximum
            trainer.AddGlobalHook(new DiskSaviorHook<INetwork>(TimeStep.Every(1, TimeScale.Epoch), "network.self", Namers.Static("tictactoe.sgnet"), verbose: true)
                                  .On(new ExtremaCriteria("shared.classification_accuracy_top1", ExtremaTarget.Max)));

            return trainer;
        }
Beispiel #4
0
        /// <summary>
        /// Creates a trainer for the Parkinsons dataset: a feed-forward regression-style
        /// network with a single output, trained with Adagrad on a single-threaded CPU
        /// operator wrapped in a debug handler.
        /// </summary>
        /// <param name="sigma">The sigma environment to create the trainer in.</param>
        /// <returns>The configured trainer (not yet added to the environment).</returns>
        private static ITrainer CreateParkinsonsTrainer(SigmaEnvironment sigma)
        {
            IDataset parkinsonsDataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateTrainer("parkinsons-trainer");

            // 22 input features -> two hidden layers -> single output with squared-difference cost
            trainer.Network = new Network();
            trainer.Network.Architecture =
                InputLayer.Construct(22)
                + FullyConnectedLayer.Construct(140)
                + FullyConnectedLayer.Construct(20)
                + FullyConnectedLayer.Construct(1)
                + OutputLayer.Construct(1)
                + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(10, parkinsonsDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(parkinsonsDataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);
            // the CPU handler is wrapped in a DebugHandler here (unlike the other samples)
            trainer.Operator = new CpuSinglethreadedOperator(new DebugHandler(new CpuFloat32Handler()));

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            // binary accuracy against a 0.5 decision threshold
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            return trainer;
        }
Beispiel #5
0
        /// <summary>
        /// Creates a trainer for the IRIS dataset: a small feed-forward network trained
        /// with plain gradient descent on a single-threaded CPU operator, reporting
        /// cost and top-1 validation accuracy every epoch.
        /// </summary>
        /// <param name="sigma">The sigma environment to create the trainer in.</param>
        /// <returns>The configured trainer (not yet added to the environment).</returns>
        private static ITrainer CreateIrisTrainer(SigmaEnvironment sigma)
        {
            IDataset irisDataset = Defaults.Datasets.Iris();

            ITrainer trainer = sigma.CreateTrainer("iris-trainer");

            trainer.Network = new Network
            {
                // 4 inputs -> two hidden layers -> 3-class output with squared-difference cost
                Architecture = InputLayer.Construct(4)
                               + FullyConnectedLayer.Construct(12)
                               + FullyConnectedLayer.Construct(3)
                               + OutputLayer.Construct(3)
                               + SquaredDifferenceCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(50, irisDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(irisDataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.06);
            trainer.Operator = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));

            return trainer;
        }
Beispiel #6
0
        /// <summary>
        /// Trains a single-output network on the Parkinsons dataset using a ghost
        /// trainer (no operator assigned here) with Adagrad, then runs the environment.
        /// </summary>
        private static void SampleParkinsons()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("parkinsons");

            IDataset parkinsonsDataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateGhostTrainer("parkinsons-trainer");

            // 22 input features -> two hidden layers -> single output with squared-difference cost
            trainer.Network.Architecture =
                InputLayer.Construct(22)
                + FullyConnectedLayer.Construct(140)
                + FullyConnectedLayer.Construct(20)
                + FullyConnectedLayer.Construct(1)
                + OutputLayer.Construct(1)
                + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(10, parkinsonsDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(parkinsonsDataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            // binary accuracy against a 0.5 decision threshold
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            sigma.AddTrainer(trainer);

            sigma.PrepareAndRun();
        }
Beispiel #7
0
        /// <summary>
        /// Trains a single-output network on the WDBC dataset using a ghost trainer
        /// (no operator assigned here) with gradient descent, exposed through an HTTP monitor.
        /// </summary>
        private static void SampleWdbc()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("wdbc");

            IDataset wdbcDataset = Defaults.Datasets.Wdbc();

            ITrainer trainer = sigma.CreateGhostTrainer("wdbc-trainer");

            // 30 input features -> two hidden layers -> single output with squared-difference cost
            trainer.Network.Architecture =
                InputLayer.Construct(30)
                + FullyConnectedLayer.Construct(42)
                + FullyConnectedLayer.Construct(24)
                + FullyConnectedLayer.Construct(1)
                + OutputLayer.Construct(1)
                + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(72, wdbcDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(wdbcDataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.005);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            // binary accuracy against a 0.5 decision threshold
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            sigma.AddTrainer(trainer);
            sigma.AddMonitor(new HttpMonitor("http://+:8080/sigma/"));

            sigma.PrepareAndRun();
        }
Beispiel #8
0
        /// <summary>
        /// Stores the given trainer and hook, attaches the hook to the trainer, and
        /// registers a global hook that clears this panel when training stops.
        /// </summary>
        /// <param name="trainer">The trainer the hook is attached to.</param>
        /// <param name="hook">The accumulated-value reporter hook to attach.</param>
        protected void Init(ITrainer trainer, VisualAccumulatedValueReporterHook hook)
        {
            AttachedHook = hook;
            Trainer      = trainer;

            Trainer.AddHook(hook);
            Trainer.AddGlobalHook(new LambdaHook(TimeStep.Every(1, TimeScale.Stop), (reg, res) => Clear()));

            // Scale x-axis labels by the hook's reporting interval so they reflect
            // actual time steps rather than report counts.
            // TODO(review): is a formatter the best solution?
            AxisX.LabelFormatter = value => (value * hook.TimeStep.Interval).ToString(CultureInfo.InvariantCulture);
            AxisX.Unit           = hook.TimeStep.Interval;
        }
Beispiel #9
0
        /// <summary>
        /// Create an IRIS trainer that observes the current epoch and iteration.
        /// Reads the raw iris CSV (from a local file or the UCI archive), maps the class
        /// names to indices, one-hot encodes the targets and normalises the inputs.
        /// </summary>
        /// <param name="sigma">The sigma environment.</param>
        /// <returns>The newly created trainer that can be added to the environment.</returns>
        private static ITrainer CreateIrisTrainer(SigmaEnvironment sigma)
        {
            CsvRecordReader irisReader = new CsvRecordReader(new MultiSource(new FileSource("iris.data"), new UrlSource("http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data")));

            // Extract inputs and the class-name target column, map class names to indices 0..2,
            // one-hot encode the target and normalise each input index to [0, 1] using the
            // per-feature min/max pairs below.
            IRecordExtractor irisExtractor = irisReader
                .Extractor("inputs", new[] { 0, 3 }, "targets", 4)
                .AddValueMapping(4, "Iris-setosa", "Iris-versicolor", "Iris-virginica")
                .Preprocess(new OneHotPreprocessor(sectionName: "targets", minValue: 0, maxValue: 2))
                .Preprocess(new PerIndexNormalisingPreprocessor(0, 1, "inputs", 0, 4.3, 7.9, 1, 2.0, 4.4, 2, 1.0, 6.9, 3, 0.1, 2.5));

            // the same dataset is used for both training and validation
            Dataset irisDataset = new Dataset("iris", Dataset.BlockSizeAuto, irisExtractor);
            IDataset trainingDataset = irisDataset;
            IDataset validationDataset = irisDataset;

            ITrainer trainer = sigma.CreateTrainer("test");

            trainer.Network = new Network();
            trainer.Network.Architecture =
                InputLayer.Construct(4)
                + FullyConnectedLayer.Construct(10)
                + FullyConnectedLayer.Construct(20)
                + FullyConnectedLayer.Construct(10)
                + FullyConnectedLayer.Construct(3)
                + OutputLayer.Construct(3)
                + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(4, trainingDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(validationDataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.002);
            trainer.Operator = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.4));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01, mean: 0.05));

            trainer.AddHook(new ValueReporterHook("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new ValidationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));
            trainer.AddLocalHook(new CurrentEpochIterationReporter(TimeStep.Every(1, TimeScale.Epoch)));

            return trainer;
        }
Beispiel #10
0
        /// <summary>
        /// Create a BitmapPanel that can easily be updated by a hook. The given hook
        /// is attached to the given trainer automatically.
        /// </summary>
        /// <param name="title">The given title.</param>
        /// <param name="inputWidth">The width of the bitmap panel's data grid (not the rendered width).</param>
        /// <param name="inputHeight">The height of the bitmap panel's data grid (not the rendered height).</param>
        /// <param name="hook">The hook to attach to <paramref name="trainer"/>.</param>
        /// <param name="trainer">The trainer the hook will be applied to.</param>
        /// <param name="headerContent">The content for the header. If <c>null</c> is passed, the title will be used.</param>
        /// <exception cref="ArgumentNullException">If <paramref name="hook"/> or <paramref name="trainer"/> is null.</exception>
        protected BitmapHookPanel(string title, int inputWidth, int inputHeight, IHook hook, ITrainer trainer, object headerContent = null)
            : this(title, inputWidth, inputHeight, headerContent)
        {
            // guard clauses — hook is checked first, matching the declared parameter order of this overload
            if (hook == null)
            {
                throw new ArgumentNullException(nameof(hook));
            }

            if (trainer == null)
            {
                throw new ArgumentNullException(nameof(trainer));
            }

            trainer.AddHook(hook);
        }
Beispiel #11
0
        /// <summary>
        /// Create an AccuracyPanel with a given title. It displays given accuracies per epoch.
        /// If a title is not sufficient modify <see cref="SigmaPanel.Header" />.
        /// </summary>
        /// <param name="title">The given title.</param>
        /// <param name="trainer">The trainer the accuracy-reporting hooks are attached to.</param>
        /// <param name="timeStep">The time step controlling how often accuracy is reported.</param>
        /// <param name="headerContent">The content for the header. If <c>null</c> is passed,
        /// the title will be used.</param>
        /// <param name="tops">The "top-N" accuracies to chart, one line series per entry.</param>
        /// <exception cref="ArgumentNullException">If <paramref name="trainer"/> or <paramref name="timeStep"/> is null.</exception>
        public AccuracyPanel(string title, ITrainer trainer, ITimeStep timeStep, object headerContent = null, params int[] tops) : base(title, headerContent)
        {
            // BUGFIX: trainer was dereferenced below without a guard — a null trainer
            // previously surfaced as a NullReferenceException instead of an ArgumentNullException.
            if (trainer == null)
            {
                throw new ArgumentNullException(nameof(trainer));
            }
            if (timeStep == null)
            {
                throw new ArgumentNullException(nameof(timeStep));
            }

            // skip the first since its automatically generated
            for (int i = 1; i < tops.Length; i++)
            {
                AddSeries(new LineSeries());
            }

            trainer.AddHook(new ChartValidationAccuracyReport(this, "validation", timeStep, tops));
            // clear the chart when training stops
            trainer.AddGlobalHook(new LambdaHook(TimeStep.Every(1, TimeScale.Stop), (registry, resolver) => Clear()));

            // accuracy is charted as a percentage
            AxisY.MinValue = 0;
            AxisY.MaxValue = 100;
        }