Example #1
        public void TestRecurrent()
        {
            var data        = BinaryIntegers.Addition(100, false).Split(0);
            var graph       = new GraphFactory(_lap);
            var errorMetric = graph.ErrorMetric.BinaryClassification;

            graph.CurrentPropertySet
            .Use(graph.GradientDescent.Adam)
            .Use(graph.GaussianWeightInitialisation(false, 0.1f, GaussianVarianceCalibration.SquareRoot2N));

            // create the engine
            var trainingData = graph.CreateDataSource(data.Training);
            var testData     = trainingData.CloneWith(data.Test);
            var engine       = graph.CreateTrainingEngine(trainingData, learningRate: 0.01f, batchSize: 16);

            // build the network
            const int HIDDEN_LAYER_SIZE = 32, TRAINING_ITERATIONS = 5;
            var       memory  = new float[HIDDEN_LAYER_SIZE];
            var       network = graph.Connect(engine)
                                .AddSimpleRecurrent(graph.ReluActivation(), memory)
                                .AddFeedForward(engine.DataSource.OutputSize)
                                .Add(graph.ReluActivation())
                                .AddBackpropagationThroughTime(errorMetric)
            ;

            // train the network for five iterations, saving the model on each improvement
            BrightWire.Models.ExecutionGraph bestGraph = null;
            engine.Train(TRAINING_ITERATIONS, testData, errorMetric, bn => bestGraph = bn.Graph);

            // export the best model's graph and verify it against some unseen integers
            var executionEngine = graph.CreateEngine(bestGraph ?? engine.Graph);
            var testData2       = graph.CreateDataSource(BinaryIntegers.Addition(8, true));
            var results         = executionEngine.Execute(testData2);
        }
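
The returned results can then be scored against the error metric, continuing the test above. A minimal sketch, assuming the results and errorMetric variables from this example:

            // average the binary classification error over the unseen integers,
            // using the same pattern as the execution examples later in this collection
            var averageError = results
                               .Where(r => r.Target != null)
                               .Average(r => r.CalculateError(errorMetric));
            Console.WriteLine($"Average error on unseen integers: {averageError}");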
Example #2
        public static void TrainWithSelu(string dataFilesPath)
        {
            using (var lap = BrightWireProvider.CreateLinearAlgebra()) {
                var graph = new GraphFactory(lap);

                // parse the iris CSV into a data table and normalise
                var dataTable = new StreamReader(new MemoryStream(File.ReadAllBytes(dataFilesPath))).ParseCSV(',').Normalise(NormalisationType.Standard);

                // split the data table into training and test tables
                var split        = dataTable.Split(0);
                var trainingData = graph.CreateDataSource(split.Training);
                var testData     = graph.CreateDataSource(split.Test);

                // one hot encoding uses the index of the output vector's maximum value as the classification label
                var errorMetric = graph.ErrorMetric.OneHotEncoding;

                // configure the network properties
                graph.CurrentPropertySet
                .Use(graph.GradientDescent.RmsProp)
                .Use(graph.GaussianWeightInitialisation(true, 0.1f, GaussianVarianceCalibration.SquareRoot2N, GaussianVarianceCount.FanInFanOut))
                ;

                // create the training engine
                const float TRAINING_RATE = 0.1f;
                var         engine        = graph.CreateTrainingEngine(trainingData, TRAINING_RATE, batchSize: 128);

                const int LAYER_SIZE = 64;

                Func <INode> activation = () => new SeluActivation();
                //Func<INode> activation = () => graph.ReluActivation();

                // create the network with the custom activation function
                graph.Connect(engine)
                .AddFeedForward(LAYER_SIZE)
                .AddBatchNormalisation()
                .Add(activation())
                .AddFeedForward(LAYER_SIZE)
                .AddBatchNormalisation()
                .Add(activation())
                .AddFeedForward(LAYER_SIZE)
                .AddBatchNormalisation()
                .Add(activation())
                .AddFeedForward(LAYER_SIZE)
                .AddBatchNormalisation()
                .Add(activation())
                .AddFeedForward(LAYER_SIZE)
                .AddBatchNormalisation()
                .Add(activation())
                .AddFeedForward(LAYER_SIZE)
                .AddBatchNormalisation()
                .Add(activation())
                .AddFeedForward(trainingData.OutputSize)
                .Add(graph.SoftMaxActivation())
                .AddBackpropagation(errorMetric)
                ;

                const int TRAINING_ITERATIONS = 500;
                engine.Train(TRAINING_ITERATIONS, testData, errorMetric, null, 50);
            }
        }
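
For reference, SELU is the self-normalising activation from Klambauer et al. ("Self-Normalizing Neural Networks"). A minimal sketch of the element-wise forward function that a SeluActivation node applies; the node itself also needs the matching derivative for backpropagation, omitted here:

            // standard SELU constants (alpha and scale from the paper)
            const float Alpha = 1.6732632f, Scale = 1.0507009f;

            // f(x) = Scale * x                  for x > 0
            // f(x) = Scale * Alpha * (e^x - 1)  for x <= 0
            float Selu(float x) => Scale * (x > 0f ? x : Alpha * ((float)Math.Exp(x) - 1f));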
Example #3
        public void VectorDataSource()
        {
            var vectors = Enumerable.Range(0, 10).Select(i => FloatVector.Create(GetArray(i, 10))).
                          ToList();
            var dataSource      = _factory.CreateDataSource(vectors);
            var miniBatch       = dataSource.Get(null, new[] { 0, 1, 2 });
            var currentSequence = miniBatch.CurrentSequence;
            var batchMatrix     = currentSequence.Input[0].GetMatrix();

            Assert.IsNull(currentSequence.Target);
            Assert.IsTrue(batchMatrix.RowCount == 3);
            Assert.IsTrue(batchMatrix.ColumnCount == 10);
            Assert.AreEqual(batchMatrix.Row(0).GetAt(0), 0f);
            Assert.AreEqual(batchMatrix.Row(1).GetAt(0), 1f);
        }
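
The GetArray helper is not shown in this test; any implementation whose first element equals the row index satisfies the assertions above. A hypothetical version:

            // hypothetical helper: every entry equals the row index, so that
            // batchMatrix.Row(i).GetAt(0) == i as the assertions expect
            static float[] GetArray(int index, int size)
            {
                return Enumerable.Range(0, size).Select(_ => (float)index).ToArray();
            }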
Example #4
        static (GraphFactory, IDataSource) MakeGraphAndData()
        {
            var graph = new GraphFactory(_cpu);
            var data  = graph.CreateDataSource(And.Get());

            return(graph, data);
        }
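
A hypothetical caller deconstructs the returned tuple directly; the learning rate and batch size below are illustrative values, not part of this example:

            // build a training engine over the AND truth-table data source
            var (graph, data) = MakeGraphAndData();
            var engine        = graph.CreateTrainingEngine(data, learningRate: 0.1f, batchSize: 4);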
Example #5
        /// <summary>
        /// Trains a neural network to predict bicycle sharing patterns
        /// Files can be downloaded from https://archive.ics.uci.edu/ml/machine-learning-databases/00275/
        /// </summary>
        /// <param name="dataFilePath">The path to the csv file</param>
        public static void PredictBicyclesWithNeuralNetwork(string dataFilePath)
        {
            var dataTable = _LoadBicyclesDataTable(dataFilePath);
            var split     = dataTable.Split(0);

            using (var lap = BrightWireProvider.CreateLinearAlgebra(false))
            {
                var graph        = new GraphFactory(lap);
                var errorMetric  = graph.ErrorMetric.Quadratic;
                var trainingData = graph.CreateDataSource(split.Training);
                var testData     = trainingData.CloneWith(split.Test);
                graph.CurrentPropertySet
                .Use(graph.Adam())
                ;

                var engine = graph.CreateTrainingEngine(trainingData, 1.3f, 128);
                graph.Connect(engine)
                .AddFeedForward(16)
                .Add(graph.SigmoidActivation())
                //.AddDropOut(dropOutPercentage: 0.5f)
                .AddFeedForward(engine.DataSource.OutputSize)
                //.Add(graph.SigmoidActivation())
                .AddBackpropagation(errorMetric)
                ;

                engine.Train(500, testData, errorMetric);
            }
        }
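
The _LoadBicyclesDataTable helper is not shown here. A plausible sketch, assuming the CSV has a header row and the usage count sits in the final column, follows the same parse-and-normalise pattern as the other CSV examples in this collection:

        static IDataTable _LoadBicyclesDataTable(string dataFilePath)
        {
            // parse the CSV (with a header row), mark the last column as the
            // regression target and feature-scale the values into the 0..1 range
            var dataTable = new StreamReader(dataFilePath).ParseCSV(',', true);
            dataTable.TargetColumnIndex = dataTable.ColumnCount - 1;
            return dataTable.Normalise(NormalisationType.FeatureScale);
        }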
Example #6
        static void ManyToOne()
        {
            var grammar   = new SequenceClassification(dictionarySize: 10, minSize: 5, maxSize: 5, noRepeat: true, isStochastic: false);
            var sequences = grammar.GenerateSequences().Take(1000).ToList();
            var builder   = BrightWireProvider.CreateDataTableBuilder();

            builder.AddColumn(ColumnType.Matrix, "Sequence");
            builder.AddColumn(ColumnType.Vector, "Summary");

            foreach (var sequence in sequences)
            {
                var list    = new List <FloatVector>();
                var charSet = new HashSet <char>();
                foreach (var ch in sequence)
                {
                    charSet.Add(ch);
                    var row = grammar.Encode(charSet.Select(ch2 => (ch2, 1f)));
                    list.Add(row);
                }
                builder.Add(new FloatMatrix {
                    Row = list.ToArray()
                }, list.Last());
            }
            var data = builder.Build().Split(0);

            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.BinaryClassification;

                // create the property set
                var propertySet = graph.CurrentPropertySet
                                  .Use(graph.GradientDescent.RmsProp)
                                  .Use(graph.WeightInitialisation.Xavier)
                ;

                // create the engine
                var trainingData = graph.CreateDataSource(data.Training);
                var testData     = trainingData.CloneWith(data.Test);
                var engine       = graph.CreateTrainingEngine(trainingData, 0.03f, 8);

                // build the network
                const int HIDDEN_LAYER_SIZE = 128;
                var       memory            = new float[HIDDEN_LAYER_SIZE];
                var       network           = graph.Connect(engine)
                                              .AddLstm(memory)
                                              //.AddSimpleRecurrent(graph.ReluActivation(), memory)
                                              .AddFeedForward(engine.DataSource.OutputSize)
                                              .Add(graph.SigmoidActivation())
                                              .AddBackpropagationThroughTime(errorMetric)
                ;

                engine.Train(10, testData, errorMetric);

                var networkGraph    = engine.Graph;
                var executionEngine = graph.CreateEngine(networkGraph);

                var output = executionEngine.Execute(testData);
                Console.WriteLine(output.Where(o => o.Target != null).Average(o => o.CalculateError(errorMetric)));
            }
        }
Example #7
        public void DefaultDataSource()
        {
            var builder = BrightWireProvider.CreateDataTableBuilder();

            builder.AddColumn(ColumnType.Float, "val1");
            builder.AddColumn(ColumnType.Double, "val2");
            builder.AddColumn(ColumnType.String, "val3");
            builder.AddColumn(ColumnType.String, "cls", true);
            builder.Add(0.5f, 1.1, "d", "a");
            builder.Add(0.2f, 1.5, "c", "b");
            builder.Add(0.7f, 0.5, "b", "c");
            builder.Add(0.2f, 0.6, "a", "d");
            var table          = builder.Build();
            var vectoriser     = table.GetVectoriser();
            var graph          = new GraphFactory(_lap);
            var dataSource     = graph.CreateDataSource(table, vectoriser);
            var miniBatch      = dataSource.Get(null, new[] { 1 });
            var input          = miniBatch.CurrentSequence.Input[0].GetMatrix().Row(0).AsIndexable();
            var expectedOutput = miniBatch.CurrentSequence.Target.GetMatrix().Row(0).AsIndexable();

            Assert.AreEqual(input[0], 0.2f);
            Assert.AreEqual(input[1], 1.5f);
            Assert.AreEqual(expectedOutput.Count, 4);
            Assert.AreEqual(vectoriser.GetOutputLabel(2, expectedOutput.MaximumIndex()), "b");
        }
Example #8
        public static void IntegerAddition()
        {
            // generate 1000 random integer additions (split into training and test sets)
            var data = BinaryIntegers.Addition(1000, false).Split(0);

            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph = new GraphFactory(lap);

                // binary classification rounds each output to either 0 or 1
                var errorMetric = graph.ErrorMetric.BinaryClassification;

                // configure the network properties
                graph.CurrentPropertySet
                .Use(graph.GradientDescent.Adam)
                .Use(graph.GaussianWeightInitialisation(false, 0.1f, GaussianVarianceCalibration.SquareRoot2N))
                ;

                // create the engine
                var trainingData = graph.CreateDataSource(data.Training);
                var testData     = trainingData.CloneWith(data.Test);
                var engine       = graph.CreateTrainingEngine(trainingData, learningRate: 0.01f, batchSize: 16);

                // build the network
                const int HIDDEN_LAYER_SIZE = 32, TRAINING_ITERATIONS = 30;
                var       memory  = new float[HIDDEN_LAYER_SIZE];
                var       network = graph.Connect(engine)
                                    .AddSimpleRecurrent(graph.ReluActivation(), memory)
                                    .AddFeedForward(engine.DataSource.OutputSize)
                                    .Add(graph.ReluActivation())
                                    .AddBackpropagationThroughTime(errorMetric)
                ;

                // train the network for thirty iterations, saving the model on each improvement
                Models.ExecutionGraph bestGraph = null;
                engine.Train(TRAINING_ITERATIONS, testData, errorMetric, bn => bestGraph = bn.Graph);

                // export the best model's graph and verify it against some unseen integers
                var executionEngine = graph.CreateEngine(bestGraph ?? engine.Graph);
                var testData2       = graph.CreateDataSource(BinaryIntegers.Addition(8, true));
                var results         = executionEngine.Execute(testData2);

                // group the output
                var groupedResults = new (FloatVector[] Input, FloatVector[] Target, FloatVector[] Output)[8];
            }
        }
Example #9
        static void OneToMany()
        {
            var grammar = new SequenceGenerator(dictionarySize: 10, minSize: 5, maxSize: 5,
                                                noRepeat: true, isStochastic: false);
            var sequences = grammar.GenerateSequences().Take(1000).ToList();
            var builder   = BrightWireProvider.CreateDataTableBuilder();

            builder.AddColumn(ColumnType.Vector, "Summary");
            builder.AddColumn(ColumnType.Matrix, "Sequence");
            foreach (var sequence in sequences)
            {
                var sequenceData = sequence.GroupBy(ch => ch).Select(g => (g.Key, g.Count())).
                                   ToDictionary(d => d.Item1, d => (float)d.Item2);
                var summary = grammar.Encode(sequenceData.Select(kv => (kv.Key, kv.Value)));
                var list    = new List <FloatVector>();
                foreach (var item in sequenceData.OrderBy(kv => kv.Key))
                {
                    var row = grammar.Encode(item.Key, item.Value);
                    list.Add(row);
                }

                builder.Add(summary, FloatMatrix.Create(list.ToArray()));
            }

            var data = builder.Build().Split(0);

            using var lap = BrightWireProvider.CreateLinearAlgebra(false);
            var graph       = new GraphFactory(lap);
            var errorMetric = graph.ErrorMetric.BinaryClassification;

            // create the property set
            var propertySet = graph.CurrentPropertySet.Use(graph.GradientDescent.RmsProp).
                              Use(graph.WeightInitialisation.Xavier);

            // create the engine
            const float TRAINING_RATE = 0.1f;
            var         trainingData  = graph.CreateDataSource(data.Training);
            var         testData      = trainingData.CloneWith(data.Test);
            var         engine        = graph.CreateTrainingEngine(trainingData, TRAINING_RATE, 8);

            engine.LearningContext.ScheduleLearningRate(30, TRAINING_RATE / 3);

            // build the network
            const int HIDDEN_LAYER_SIZE = 128;

            graph.Connect(engine).AddLstm(HIDDEN_LAYER_SIZE).AddFeedForward(engine.DataSource.OutputSize).
            Add(graph.SigmoidActivation()).AddBackpropagation(errorMetric);
            engine.Train(40, testData, errorMetric);
            var networkGraph    = engine.Graph;
            var executionEngine = graph.CreateEngine(networkGraph);
            var output          = executionEngine.Execute(testData);

            Console.WriteLine(output.Average(o => o.CalculateError(errorMetric)));
        }
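
Because this is a one-to-many model, each test row produces a whole sequence of output vectors. If per-row inspection is needed inside the method above, the execution results can be reordered with OrderSequentialOutput, the same call the stock data example uses later in this collection:

            // reassemble the per-step outputs so each entry is one input row's full output sequence
            var orderedOutput = executionEngine.Execute(testData).OrderSequentialOutput();
            Console.WriteLine($"Sequences executed: {orderedOutput.Count()}");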
Example #10
        /// <summary>
        /// Trains a feed forward neural net on the emotion dataset
        /// http://lpis.csd.auth.gr/publications/tsoumakas-ismir08.pdf
        /// The data files can be downloaded from https://downloads.sourceforge.net/project/mulan/datasets/emotions.rar
        /// </summary>
        /// <param name="dataFilePath"></param>
        public static void MultiLabelSingleClassifier(string dataFilePath)
        {
            var emotionData           = _LoadEmotionData(dataFilePath);
            var attributeColumns      = Enumerable.Range(0, emotionData.ColumnCount - CLASSIFICATION_COUNT).ToList();
            var classificationColumns = Enumerable.Range(emotionData.ColumnCount - CLASSIFICATION_COUNT, CLASSIFICATION_COUNT).ToList();

            // create a new data table with a vector input column and a vector output column
            var dataTableBuilder = BrightWireProvider.CreateDataTableBuilder();

            dataTableBuilder.AddColumn(ColumnType.Vector, "Attributes");
            dataTableBuilder.AddColumn(ColumnType.Vector, "Target", isTarget: true);
            emotionData.ForEach(row => {
                var input  = FloatVector.Create(row.GetFields <float>(attributeColumns).ToArray());
                var target = FloatVector.Create(row.GetFields <float>(classificationColumns).ToArray());
                dataTableBuilder.Add(input, target);
                return(true);
            });
            var data = dataTableBuilder.Build().Split(0);

            // train a neural network
            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph = new GraphFactory(lap);

                // binary classification rounds each output to 0 or 1 and compares each output against the binary classification targets
                var errorMetric = graph.ErrorMetric.BinaryClassification;

                // configure the network properties
                graph.CurrentPropertySet
                .Use(graph.GradientDescent.Adam)
                .Use(graph.WeightInitialisation.Xavier)
                ;

                // create a training engine
                const float TRAINING_RATE = 0.3f;
                var         trainingData  = graph.CreateDataSource(data.Training);
                var         testData      = trainingData.CloneWith(data.Test);
                var         engine        = graph.CreateTrainingEngine(trainingData, TRAINING_RATE, 128);

                // build the network
                const int HIDDEN_LAYER_SIZE = 64, TRAINING_ITERATIONS = 2000;
                var       network = graph.Connect(engine)
                                    .AddFeedForward(HIDDEN_LAYER_SIZE)
                                    .Add(graph.SigmoidActivation())
                                    .AddDropOut(dropOutPercentage: 0.5f)
                                    .AddFeedForward(engine.DataSource.OutputSize)
                                    .Add(graph.SigmoidActivation())
                                    .AddBackpropagation(errorMetric)
                ;

                // train the network
                engine.Train(TRAINING_ITERATIONS, testData, errorMetric, null, 50);
            }
        }
Example #11
        public static void TrainWithSelu(string dataFilesPath)
        {
            using (var lap = BrightWireGpuProvider.CreateLinearAlgebra()) {
                var graph = new GraphFactory(lap);

                // parse the iris CSV into a data table and normalise
                var dataTable = new StreamReader(new MemoryStream(File.ReadAllBytes(dataFilesPath))).ParseCSV(',').Normalise(NormalisationType.Standard);

                // split the data table into training and test tables
                var split        = dataTable.Split(0);
                var trainingData = graph.CreateDataSource(split.Training);
                var testData     = graph.CreateDataSource(split.Test);

                // use a one hot encoding error metric, rmsprop gradient descent and xavier weight initialisation
                var errorMetric = graph.ErrorMetric.OneHotEncoding;
                var propertySet = graph.CurrentPropertySet
                                  .Use(graph.GradientDescent.RmsProp)
                                  .Use(graph.GaussianWeightInitialisation(true, 0.1f, GaussianVarianceCalibration.SquareRoot2N, GaussianVarianceCount.FanInFanOut))
                ;

                // create the training engine
                const float TRAINING_RATE = 0.01f;
                var         engine        = graph.CreateTrainingEngine(trainingData, TRAINING_RATE, 128);

                // create the network
                graph.Connect(engine)
                .AddFeedForward(32)
                .Add(new SeluActivation())
                .AddFeedForward(trainingData.OutputSize)
                .Add(graph.SigmoidActivation())
                .AddBackpropagation(errorMetric)
                ;

                // train the network
                engine.Train(1000, testData, errorMetric, null, 50);
            }
        }
Example #12
        public void TiedAutoEncoder()
        {
            const int DATA_SIZE = 1000, REDUCED_SIZE = 200;

            // create some random data
            var rand    = new Random();
            var builder = BrightWireProvider.CreateDataTableBuilder();

            builder.AddVectorColumn(DATA_SIZE, "Input");
            builder.AddVectorColumn(DATA_SIZE, "Output", true);
            for (var i = 0; i < 100; i++)
            {
                var vector = new FloatVector {
                    Data = Enumerable.Range(0, DATA_SIZE).Select(j => Convert.ToSingle(rand.NextDouble())).ToArray()
                };
                builder.Add(vector, vector);
            }
            var dataTable = builder.Build();

            // build the autoencoder with tied weights
            var graph       = new GraphFactory(_lap);
            var dataSource  = graph.CreateDataSource(dataTable);
            var engine      = graph.CreateTrainingEngine(dataSource, 0.03f, 32);
            var errorMetric = graph.ErrorMetric.Quadratic;

            graph.CurrentPropertySet
            .Use(graph.RmsProp())
            .Use(graph.WeightInitialisation.Xavier)
            ;

            graph.Connect(engine)
            .AddFeedForward(REDUCED_SIZE, "layer")
            .Add(graph.TanhActivation())
            .AddTiedFeedForward(engine.Start.FindByName("layer") as IFeedForward)
            .Add(graph.TanhActivation())
            .AddBackpropagation(errorMetric)
            ;
            using (var executionContext = graph.CreateExecutionContext()) {
                for (var i = 0; i < 2; i++)
                {
                    var trainingError = engine.Train(executionContext);
                }
            }
            var networkGraph    = engine.Graph;
            var executionEngine = graph.CreateEngine(networkGraph);
            var results         = executionEngine.Execute(dataTable.GetRow(0).GetField <FloatVector>(0).Data);
        }
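
Since the autoencoder reconstructs its own input, the execution result can be compared back against the original vector. A minimal check, assuming the results and dataTable variables above:

            // compare the reconstruction against the original input vector
            var original      = dataTable.GetRow(0).GetField<FloatVector>(0).Data;
            var reconstructed = results.Output[0].Data;
            Console.WriteLine($"First element: {original[0]} -> {reconstructed[0]}");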
Example #13
        public static IDataSource BuildTensors(GraphFactory graph, IDataSource existing, IReadOnlyList <Mnist.Image> images)
        {
            var dataTable = BrightWireProvider.CreateDataTableBuilder();

            dataTable.AddColumn(ColumnType.Tensor, "Image");
            dataTable.AddColumn(ColumnType.Vector, "Target", true);
            foreach (var image in images)
            {
                var data = image.AsFloatTensor;
                dataTable.Add(data.Tensor, data.Label);
            }
            if (existing != null)
            {
                return(existing.CloneWith(dataTable.Build()));
            }
            else
            {
                return(graph.CreateDataSource(dataTable.Build()));
            }
        }
Example #14
        static IDataSource _BuildTensors(GraphFactory graph, IDataSource existing,
                                         IReadOnlyList <Mnist.Image> images)
        {
            // convolutional neural networks expect a 3D tensor => vector mapping
            var dataTable = BrightWireProvider.CreateDataTableBuilder();

            dataTable.AddColumn(ColumnType.Tensor, "Image");
            dataTable.AddColumn(ColumnType.Vector, "Target", isTarget: true);
            foreach (var image in images)
            {
                var data = image.AsFloatTensor;
                dataTable.Add(data.Tensor, data.Label);
            }

            // reuse the network used for training when building the test data source
            if (existing != null)
            {
                return(existing.CloneWith(dataTable.Build()));
            }
            return(graph.CreateDataSource(dataTable.Build()));
        }
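
A typical call pattern, assuming hypothetical trainingImages and testImages lists loaded from the MNIST files, builds the training source first and then passes it in so the test source reuses the same tensor adapter:

            // build the training data source, then reuse its adapter for the test images
            var trainingData = _BuildTensors(graph, null, trainingImages);
            var testData     = _BuildTensors(graph, trainingData, testImages);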
Example #15
        /// <summary>
        /// Uses a recurrent LSTM neural network to predict stock price movements
        /// Data can be downloaded from https://raw.githubusercontent.com/plotly/datasets/master/stockdata.csv
        /// </summary>
        static void StockData(string dataFilePath)
        {
            // load and normalise the data
            var dataSet    = new StreamReader(dataFilePath).ParseCSV(',', true);
            var normalised = dataSet.Normalise(NormalisationType.FeatureScale);
            var rows       = normalised.GetNumericRows(dataSet.Columns.Where(c => c.Name != "Date").Select(c => c.Index));

            // build the data table with a window of input data and the prediction as the following value
            var builder = BrightWireProvider.CreateDataTableBuilder();

            builder.AddColumn(ColumnType.Matrix, "Past");
            builder.AddColumn(ColumnType.Vector, "Future");
            const int LAST_X_DAYS = 14;

            for (var i = 0; i < rows.Count - LAST_X_DAYS - 1; i++)
            {
                var inputVector = new List <FloatVector>();
                for (var j = 0; j < LAST_X_DAYS; j++)
                {
                    inputVector.Add(FloatVector.Create(rows[i + j]));
                }
                var input  = FloatMatrix.Create(inputVector.ToArray());
                var target = FloatVector.Create(rows[i + LAST_X_DAYS + 1]);
                builder.Add(input, target);
            }
            var data = builder.Build().Split(trainingPercentage: 0.2);

            using (var lap = BrightWireProvider.CreateLinearAlgebra()) {
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.Quadratic;

                // create the property set
                graph.CurrentPropertySet
                .Use(graph.GradientDescent.Adam)
                .Use(graph.WeightInitialisation.Xavier);

                // create the engine
                var trainingData = graph.CreateDataSource(data.Training);
                var testData     = trainingData.CloneWith(data.Test);
                var engine       = graph.CreateTrainingEngine(trainingData, learningRate: 0.03f, batchSize: 128);

                // build the network
                const int HIDDEN_LAYER_SIZE = 256;
                graph.Connect(engine)
                .AddLstm(HIDDEN_LAYER_SIZE)
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.TanhActivation())
                .AddBackpropagationThroughTime(errorMetric);

                // train the network and restore the best result
                GraphModel bestNetwork = null;
                engine.Train(50, testData, errorMetric, model => bestNetwork = model);
                if (bestNetwork != null)
                {
                    // execute each row of the test data on an execution engine
                    var executionEngine = graph.CreateEngine(bestNetwork.Graph);
                    var results         = executionEngine.Execute(testData).OrderSequentialOutput();
                    var expectedOutput  = data.Test.GetColumn <FloatVector>(1);

                    var score = results.Select((r, i) => errorMetric.Compute(r.Last(), expectedOutput[i])).Average();
                    Console.WriteLine(score);
                }
            }
        }
Example #16
        public static void XOR()
        {
            using (var lap = BrightWireProvider.CreateLinearAlgebra()) {
                // Create some training data that the network will learn.  The XOR pattern looks like:
                // 0 0 => 0
                // 1 0 => 1
                // 0 1 => 1
                // 1 1 => 0
                var data = Xor.Get();

                // create the graph
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.CrossEntropy;
                graph.CurrentPropertySet
                // use rmsprop gradient descent optimisation
                .Use(graph.GradientDescent.RmsProp)

                // and gaussian weight initialisation
                .Use(graph.WeightInitialisation.Gaussian)
                ;

                // create the engine
                var testData = graph.CreateDataSource(data);
                var engine   = graph.CreateTrainingEngine(testData, 0.1f, 4);

                // create the network
                const int HIDDEN_LAYER_SIZE = 6;
                graph.Connect(engine)
                // create a feed forward layer with sigmoid activation
                .AddFeedForward(HIDDEN_LAYER_SIZE)
                .Add(graph.SigmoidActivation())

                // create a second feed forward layer with sigmoid activation
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.SigmoidActivation())

                // backpropagate the error signal at the end of the graph
                .AddBackpropagation(errorMetric)
                ;

                // train the network
                var executionContext = graph.CreateExecutionContext();
                for (var i = 0; i < 1000; i++)
                {
                    var trainingError = engine.Train(executionContext);
                    if (i % 100 == 0)
                    {
                        engine.Test(testData, errorMetric);
                    }
                }
                engine.Test(testData, errorMetric);

                // create a new network to execute the learned network
                var networkGraph    = engine.Graph;
                var executionEngine = graph.CreateEngine(networkGraph);
                var output          = executionEngine.Execute(testData);
                Console.WriteLine(output.Average(o => o.CalculateError(errorMetric)));

                // print the learnt values
                foreach (var item in output)
                {
                    foreach (var index in item.MiniBatchSequence.MiniBatch.Rows)
                    {
                        var row    = data.GetRow(index);
                        var result = item.Output[index];
                        Console.WriteLine($"{row.GetField<int>(0)} XOR {row.GetField<int>(1)} = {result.Data[0]}");
                    }
                }
            }
        }
Example #17
        public static void CrazyTest()
        {
            using (var la = BrightWireProvider.CreateLinearAlgebra())
            {
                var graph = new GraphFactory(la);

                graph.CurrentPropertySet
                .Use(graph.GradientDescent.Adam)
                .Use(graph.GaussianWeightInitialisation(false, 0.1f, GaussianVarianceCalibration.SquareRoot2N));

                var dtbuilder = BrightWireProvider.CreateDataTableBuilder();
                dtbuilder.AddColumn(ColumnType.Tensor, "image", false);
                dtbuilder.AddColumn(ColumnType.Vector, "expectedoutput", true);

                // generate the data
                for (int i = 0; i < 666; i++)
                {
                    var fv = new BrightWire.Models.FloatVector
                    {
                        Data = new float[666],
                    };

                    var fv2 = new BrightWire.Models.FloatVector
                    {
                        Data = new float[666],
                    };

                    dtbuilder.Add(fv, fv2);
                }
                var datatable = dtbuilder.Build();

                Console.WriteLine($"####### TRAINING START #######");

                // create the engine
                var (Training, Test) = datatable.Split();
                var trainingData = graph.CreateDataSource(Training);

                var testData = graph.CreateDataSource(Test);
                var engine   = graph.CreateTrainingEngine(trainingData, learningRate: 0.01f, batchSize: 16);

                // build the network
                var errorMetric = graph.ErrorMetric.Quadratic;

                const int HIDDEN_LAYER_SIZE = 2000, TRAINING_ITERATIONS = 2000;

                var network = graph.Connect(engine)
                              .AddFeedForward(HIDDEN_LAYER_SIZE)
                              .Add(graph.ReluActivation())
                              //.AddBackpropagation(errorMetric)
                              .AddFeedForward(HIDDEN_LAYER_SIZE)
                              .Add(graph.ReluActivation())
                              //.AddBackpropagation(errorMetric)
                              .AddDropOut(.2f)
                              .AddFeedForward(engine.DataSource.OutputSize)
                              //.Add(graph.ReluActivation())
                              .AddBackpropagation(errorMetric)
                              //.AddBackpropagationThroughTime(errorMetric)
                ;

                // train the network, saving the model on each improvement
                BrightWire.Models.ExecutionGraph bestGraph = null;
                engine.Train(TRAINING_ITERATIONS, testData, errorMetric, ((modelGraph) => bestGraph = modelGraph.Graph));

                // export the best model's graph to an execution engine
                var executionEngine = graph.CreateEngine(bestGraph ?? engine.Graph);
            }
        }
Example #18
        /// <summary>
        /// Trains multiple classifiers on the emotion data set
        /// http://lpis.csd.auth.gr/publications/tsoumakas-ismir08.pdf
        /// The data files can be downloaded from https://downloads.sourceforge.net/project/mulan/datasets/emotions.rar
        /// </summary>
        /// <param name="dataFilePath"></param>
        public static void MultiLabelMultiClassifiers(string dataFilePath)
        {
            var emotionData           = _LoadEmotionData(dataFilePath);
            var attributeCount        = emotionData.ColumnCount - CLASSIFICATION_COUNT;
            var attributeColumns      = Enumerable.Range(0, attributeCount).ToList();
            var classificationColumns = Enumerable.Range(emotionData.ColumnCount - CLASSIFICATION_COUNT, CLASSIFICATION_COUNT).ToList();
            var classificationLabel   = new[] {
                "amazed-suprised",
                "happy-pleased",
                "relaxing-calm",
                "quiet-still",
                "sad-lonely",
                "angry-aggresive"
            };

            // create six separate datasets to train, each with a separate classification column
            var dataSets = Enumerable.Range(attributeCount, CLASSIFICATION_COUNT).Select(targetIndex => {
                var dataTableBuilder = BrightWireProvider.CreateDataTableBuilder();
                for (var i = 0; i < attributeCount; i++)
                {
                    dataTableBuilder.AddColumn(ColumnType.Float);
                }
                dataTableBuilder.AddColumn(ColumnType.Float, "", true);

                return(emotionData.Project(row => row.GetFields <float>(attributeColumns)
                                           .Concat(new[] { row.GetField <float>(targetIndex) })
                                           .Cast <object>()
                                           .ToList()
                                           ));
            }).Select(ds => ds.Split(0)).ToList();

            // train classifiers on each training set
            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph = new GraphFactory(lap);

                // binary classification rounds each output to 0 or 1 and compares each output against the binary classification targets
                var errorMetric = graph.ErrorMetric.BinaryClassification;

                // configure the network properties
                graph.CurrentPropertySet
                .Use(graph.GradientDescent.Adam)
                .Use(graph.WeightInitialisation.Xavier)
                ;

                for (var i = 0; i < CLASSIFICATION_COUNT; i++)
                {
                    var trainingSet = dataSets[i].Training;
                    var testSet     = dataSets[i].Test;
                    Console.WriteLine("Training on {0}", classificationLabel[i]);

                    // train and evaluate a naive bayes classifier
                    var naiveBayes = trainingSet.TrainNaiveBayes().CreateClassifier();
                    Console.WriteLine("\tNaive bayes accuracy: {0:P}", testSet
                                      .Classify(naiveBayes)
                                      .Average(d => d.Row.GetField <string>(attributeCount) == d.Classification ? 1.0 : 0.0)
                                      );

                    // train a logistic regression classifier
                    var logisticRegression = trainingSet
                                             .TrainLogisticRegression(lap, 2500, 0.25f, 0.01f)
                                             .CreatePredictor(lap)
                                             .ConvertToRowClassifier(attributeColumns)
                    ;
                    Console.WriteLine("\tLogistic regression accuracy: {0:P}", testSet
                                      .Classify(logisticRegression)
                                      .Average(d => d.Row.GetField <string>(attributeCount) == d.Classification ? 1.0 : 0.0)
                                      );

                    // train and evaluate k nearest neighbours
                    var knn = trainingSet.TrainKNearestNeighbours().CreateClassifier(lap, 10);
                    Console.WriteLine("\tK nearest neighbours accuracy: {0:P}", testSet
                                      .Classify(knn)
                                      .Average(d => d.Row.GetField <string>(attributeCount) == d.Classification ? 1.0 : 0.0)
                                      );

                    // create a training engine
                    const float TRAINING_RATE = 0.1f;
                    var         trainingData  = graph.CreateDataSource(trainingSet);
                    var         testData      = trainingData.CloneWith(testSet);
                    var         engine        = graph.CreateTrainingEngine(trainingData, TRAINING_RATE, 64);

                    // build the network
                    const int HIDDEN_LAYER_SIZE = 64, TRAINING_ITERATIONS = 2000;
                    var       network = graph.Connect(engine)
                                        .AddFeedForward(HIDDEN_LAYER_SIZE)
                                        .Add(graph.SigmoidActivation())
                                        .AddDropOut(dropOutPercentage: 0.5f)
                                        .AddFeedForward(engine.DataSource.OutputSize)
                                        .Add(graph.SigmoidActivation())
                                        .AddBackpropagation(errorMetric)
                    ;

                    // train the network
                    engine.Train(TRAINING_ITERATIONS, testData, errorMetric, null, 200);
                }
            }
        }
Example #19
        static void SequenceToSequence()
        {
            const int SEQUENCE_LENGTH = 5;
            var       grammar         = new SequenceClassification(8, SEQUENCE_LENGTH, SEQUENCE_LENGTH, true, false);
            var       sequences       = grammar.GenerateSequences().Take(2000).ToList();
            var       builder         = BrightWireProvider.CreateDataTableBuilder();

            builder.AddColumn(ColumnType.Matrix, "Input");
            builder.AddColumn(ColumnType.Matrix, "Output");

            foreach (var sequence in sequences)
            {
                var encodedSequence  = grammar.Encode(sequence);
                var reversedSequence = new FloatMatrix {
                    Row = encodedSequence.Row.Reverse().Take(SEQUENCE_LENGTH - 1).ToArray()
                };
                builder.Add(encodedSequence, reversedSequence);
            }
            var data = builder.Build().Split(0);

            using (var lap = BrightWireProvider.CreateLinearAlgebra()) {
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.BinaryClassification;

                // create the property set
                var propertySet = graph.CurrentPropertySet
                                  .Use(graph.GradientDescent.RmsProp)
                                  .Use(graph.WeightInitialisation.Xavier)
                ;

                const int   BATCH_SIZE        = 16;
                const int   HIDDEN_LAYER_SIZE = 64;
                const float TRAINING_RATE     = 0.1f;

                // create the encoder
                var encoderLearningContext = graph.CreateLearningContext(TRAINING_RATE, BATCH_SIZE, TrainingErrorCalculation.Fast, true);
                var encoderMemory          = new float[HIDDEN_LAYER_SIZE];
                var trainingData           = graph.CreateDataSource(data.Training, encoderLearningContext, wb => wb
                                                                    .AddLstm(encoderMemory, "encoder")
                                                                    .WriteNodeMemoryToSlot("shared-memory", wb.Find("encoder") as IHaveMemoryNode)
                                                                    .AddFeedForward(grammar.DictionarySize)
                                                                    .Add(graph.SigmoidActivation())
                                                                    .AddBackpropagationThroughTime(errorMetric)
                                                                    );
                var testData = trainingData.CloneWith(data.Test);

                // create the engine
                var engine = graph.CreateTrainingEngine(trainingData, TRAINING_RATE, BATCH_SIZE);
                engine.LearningContext.ScheduleLearningRate(30, TRAINING_RATE / 3);
                engine.LearningContext.ScheduleLearningRate(40, TRAINING_RATE / 9);

                // create the decoder
                var decoderMemory = new float[HIDDEN_LAYER_SIZE];
                var wb2           = graph.Connect(engine);
                wb2
                .JoinInputWithMemory("shared-memory")
                .IncrementSizeBy(HIDDEN_LAYER_SIZE)
                .AddLstm(decoderMemory, "decoder")
                .AddFeedForward(trainingData.OutputSize)
                .Add(graph.SigmoidActivation())
                .AddBackpropagationThroughTime(errorMetric)
                ;

                engine.Train(50, testData, errorMetric);

                //var dataSourceModel = (trainingData as IAdaptiveDataSource).GetModel();
                //var testData2 = graph.CreateDataSource(data.Test, dataSourceModel);
                //var networkGraph = engine.Graph;
                //var executionEngine = graph.CreateEngine(networkGraph);

                //var output = executionEngine.Execute(testData2);
                //Console.WriteLine(output.Average(o => o.CalculateError(errorMetric)));
            }
        }
Example #20
        /// <summary>
        /// Classifies text into either positive or negative sentiment
        /// The data files can be downloaded from https://archive.ics.uci.edu/ml/datasets/Sentiment+Labelled+Sentences
        /// </summary>
        /// <param name="dataFilesPath">Path to extracted data files</param>
        public static void SentimentClassification(string dataFilesPath)
        {
            var files          = new[] { "amazon_cells_labelled.txt", "imdb_labelled.txt", "yelp_labelled.txt" };
            var LINE_SEPARATOR = "\n".ToCharArray();
            var SEPARATOR      = "\t".ToCharArray();
            var stringTable    = new StringTableBuilder();
            var sentimentData  = files.SelectMany(f =>
                                                  File.ReadAllText(dataFilesPath + f).Split(LINE_SEPARATOR).
                                                  Where(l => !string.IsNullOrWhiteSpace(l)).Select(l => l.Split(SEPARATOR)).
                                                  Select(s => Tuple.Create(Tokenise(s[0]), s[1][0] == '1' ? "positive" : "negative")).
                                                  Where(d => d.Item1.Any())).Shuffle(0).ToList();
            var splitSentimentData = sentimentData.Split();

            // build training and test classification bag
            var trainingClassificationBag =
                BuildIndexedClassifications(splitSentimentData.Training, stringTable);
            var testClassificationBag =
                BuildIndexedClassifications(splitSentimentData.Test, stringTable);

            // train a bernoulli naive bayes classifier
            var bernoulli = trainingClassificationBag.TrainBernoulliNaiveBayes();

            Console.WriteLine("Bernoulli accuracy: {0:P}",
                              testClassificationBag.Classify(bernoulli.CreateClassifier()).Average(r => r.Score));

            // train a multinomial naive bayes classifier
            var multinomial = trainingClassificationBag.TrainMultinomialNaiveBayes();

            Console.WriteLine("Multinomial accuracy: {0:P}",
                              testClassificationBag.Classify(multinomial.CreateClassifier()).Average(r => r.Score));

            // convert the index lists to vectors and normalise along the way
            var sentimentDataTable = BuildIndexedClassifications(sentimentData, stringTable).
                                     ConvertToTable().Normalise(NormalisationType.Standard);
            var vectoriser        = sentimentDataTable.GetVectoriser();
            var sentimentDataSet  = sentimentDataTable.Split(0);
            var dataTableAnalysis = sentimentDataTable.GetAnalysis();

            using (var lap = BrightWireProvider.CreateLinearAlgebra())
            {
                var graph            = new GraphFactory(lap);
                var trainingData     = graph.CreateDataSource(sentimentDataSet.Training, vectoriser);
                var testData         = graph.CreateDataSource(sentimentDataSet.Test, vectoriser);
                var indexListEncoder = (IIndexListEncoder)trainingData;

                // use a one hot encoding error metric, rmsprop gradient descent and xavier weight initialisation
                var errorMetric = graph.ErrorMetric.OneHotEncoding;
                var propertySet = graph.CurrentPropertySet.Use(graph.GradientDescent.RmsProp).
                                  Use(graph.WeightInitialisation.Xavier);
                var engine = graph.CreateTrainingEngine(trainingData, 0.3f);
                engine.LearningContext.ScheduleLearningRate(5, 0.1f);
                engine.LearningContext.ScheduleLearningRate(11, 1f);
                engine.LearningContext.ScheduleLearningRate(15, 0.3f);

                // train a neural network classifier
                var neuralNetworkWire = graph.Connect(engine).AddFeedForward(512, "layer1")
                                        //.AddBatchNormalisation()
                                        .Add(graph.ReluActivation()).AddDropOut(0.5f).
                                        AddFeedForward(trainingData.OutputSize, "layer2").Add(graph.ReluActivation()).
                                        AddBackpropagation(errorMetric, "first-network");

                // train the network
                Console.WriteLine("Training neural network classifier...");
                const int  TRAINING_ITERATIONS = 10;
                GraphModel bestNetwork         = null;
                engine.Train(TRAINING_ITERATIONS, testData, errorMetric, network => bestNetwork = network);
                if (bestNetwork != null)
                {
                    engine.LoadParametersFrom(bestNetwork.Graph);
                }
                var firstClassifier = graph.CreateEngine(engine.Graph);

                // stop the backpropagation to the first neural network
                engine.LearningContext.EnableNodeUpdates(neuralNetworkWire.Find("layer1"), false);
                engine.LearningContext.EnableNodeUpdates(neuralNetworkWire.Find("layer2"), false);

                // create the bernoulli classifier wire
                var bernoulliClassifier = bernoulli.CreateClassifier();
                var bernoulliWire       = graph.Connect(engine).AddClassifier(bernoulliClassifier,
                                                                              sentimentDataSet.Training, dataTableAnalysis);

                // create the multinomial classifier wire
                var multinomialClassifier = multinomial.CreateClassifier();
                var multinomialWire       = graph.Connect(engine).AddClassifier(multinomialClassifier,
                                                                                sentimentDataSet.Training, dataTableAnalysis);

                // join the bernoulli, multinomial and neural network classification outputs
                var firstNetwork = neuralNetworkWire.Find("first-network");
                var joined       = graph.Join(multinomialWire,
                                              graph.Join(bernoulliWire, graph.Connect(trainingData.OutputSize, firstNetwork)));

                // train an additional classifier on the output of the previous three classifiers
                joined.AddFeedForward(outputSize: 64).Add(graph.ReluActivation()).
                AddDropOut(dropOutPercentage: 0.5f).AddFeedForward(trainingData.OutputSize).
                Add(graph.ReluActivation()).AddBackpropagation(errorMetric);

                // train the network again
                Console.WriteLine("Training stacked neural network classifier...");
                GraphModel bestStackedNetwork = null;
                engine.Train(10, testData, errorMetric, network => bestStackedNetwork = network);
                if (bestStackedNetwork != null)
                {
                    engine.LoadParametersFrom(bestStackedNetwork.Graph);
                }
                Console.WriteLine("Enter some text to test the classifiers...");
                while (true)
                {
                    Console.Write(">");
                    var line = Console.ReadLine();
                    if (string.IsNullOrWhiteSpace(line))
                    {
                        break;
                    }
                    var tokens    = Tokenise(line);
                    var indexList = new List <uint>();
                    foreach (var token in tokens)
                    {
                        if (stringTable.TryGetIndex(token, out uint stringIndex))
                        {
                            indexList.Add(stringIndex);
                        }
                    }

                    if (indexList.Any())
                    {
                        var queryTokens = indexList.GroupBy(d => d).
                                          Select(g => Tuple.Create(g.Key, (float)g.Count())).ToList();
                        var vector = new float[trainingData.InputSize];
                        foreach (var token in queryTokens)
                        {
                            vector[token.Item1] = token.Item2;
                        }
                        var indexList2   = IndexList.Create(indexList.ToArray());
                        var encodedInput = indexListEncoder.Encode(indexList2);
                        Console.WriteLine("Bernoulli classification: " +
                                          bernoulliClassifier.Classify(indexList2).First().Label);
                        Console.WriteLine("Multinomial classification: " +
                                          multinomialClassifier.Classify(indexList2).First().Label);
                        var result         = firstClassifier.Execute(encodedInput);
                        var classification = vectoriser.GetOutputLabel(1,
                                                                       (result.Output[0].Data[0] > result.Output[0].Data[1]) ? 0 : 1);
                        Console.WriteLine("Neural network classification: " + classification);
                        var stackedResult         = engine.Execute(encodedInput);
                        var stackedClassification = vectoriser.GetOutputLabel(1,
                                                                              (stackedResult.Output[0].Data[0] > stackedResult.Output[0].Data[1]) ? 0 : 1);
                        Console.WriteLine("Stack classification: " + stackedClassification);
                    }
                    else
                    {
                        Console.WriteLine("Sorry, none of those words have been seen before.");
                    }

                    Console.WriteLine();
                }
            }

            Console.WriteLine();
        }
Example #21
        /// <summary>
        /// Trains various classifiers on the Iris data set
        ///
        /// Tutorial available at http://www.jackdermody.net/brightwire/article/Introduction_to_Bright_Wire
        /// </summary>
        public static void IrisClassification()
        {
            // download the iris data set
            byte[] data;
            using (var client = new WebClient())
            {
                data = client.DownloadData(
                    "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data");
            }

            // parse the iris CSV into a data table
            var dataTable = new StreamReader(new MemoryStream(data)).ParseCSV();

            // the last column is the classification target ("Iris-setosa", "Iris-versicolor", or "Iris-virginica")
            var targetColumnIndex = dataTable.TargetColumnIndex = dataTable.ColumnCount - 1;

            // split the data table into training and test tables
            var split = dataTable.Split(0);

            // train and evaluate a naive bayes classifier
            var naiveBayes = split.Training.TrainNaiveBayes();

            Console.WriteLine("Naive bayes accuracy: {0:P}",
                              split.Test.Classify(naiveBayes.CreateClassifier()).Average(d =>
                                                                                         d.Row.GetField <string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0));

            // train and evaluate a decision tree classifier
            var decisionTree = split.Training.TrainDecisionTree();

            Console.WriteLine("Decision tree accuracy: {0:P}",
                              split.Test.Classify(decisionTree.CreateClassifier()).Average(d =>
                                                                                           d.Row.GetField <string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0));

            // train and evaluate a random forest classifier
            var randomForest = split.Training.TrainRandomForest(500);

            Console.WriteLine("Random forest accuracy: {0:P}",
                              split.Test.Classify(randomForest.CreateClassifier()).Average(d =>
                                                                                           d.Row.GetField <string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0));

            // fire up some linear algebra on the CPU
            using var lap = BrightWireProvider.CreateLinearAlgebra(false);
            // train and evaluate k nearest neighbours
            var knn = split.Training.TrainKNearestNeighbours();

            Console.WriteLine("K nearest neighbours accuracy: {0:P}",
                              split.Test.Classify(knn.CreateClassifier(lap, 10)).Average(d =>
                                                                                         d.Row.GetField <string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0));

            // train and evaluate a multinomial logistic regression classifier
            var logisticRegression = split.Training.TrainMultinomialLogisticRegression(lap, 500, 0.1f);

            Console.WriteLine("Multinomial logistic regression accuracy: {0:P}", split.Test
                .Classify(logisticRegression.CreateClassifier(lap))
                .Average(d => d.Row.GetField<string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0));

            // create a neural network graph factory
            var graph = new GraphFactory(lap);

            // the default data table -> vector conversion uses one hot encoding of the classification labels, so create a corresponding cost function
            var errorMetric = graph.ErrorMetric.OneHotEncoding;

            // create the property set (use rmsprop gradient descent optimisation)
            graph.CurrentPropertySet.Use(graph.GradientDescent.RmsProp);

            // create the training and test data sources
            var trainingData = graph.CreateDataSource(split.Training);
            var testData     = trainingData.CloneWith(split.Test);

            // create a 4x8x3 neural network with sigmoid activations after each layer
            const int   HIDDEN_LAYER_SIZE = 8, BATCH_SIZE = 8;
            const float LEARNING_RATE = 0.01f;
            var         engine        = graph.CreateTrainingEngine(trainingData, LEARNING_RATE, BATCH_SIZE,
                                                                   TrainingErrorCalculation.TrainingData);

            graph.Connect(engine)
                .AddFeedForward(HIDDEN_LAYER_SIZE)
                .Add(graph.SigmoidActivation())
                .AddDropOut(dropOutPercentage: 0.5f)
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.SigmoidActivation())
                .AddBackpropagation(errorMetric);

            // train the network
            Console.WriteLine("Training a 4x8x3 neural network...");
            engine.Train(500, testData, errorMetric, null, 50);
        }
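The classical models above each print an accuracy figure, but the neural network only reports its training progress. A minimal follow-up sketch (reusing the graph, engine, testData and errorMetric from the method) that would report a comparable number:

        // execute the trained graph against the test data and average the error metric
        var executionEngine = graph.CreateEngine(engine.Graph);
        var output          = executionEngine.Execute(testData);
        Console.WriteLine("Neural network accuracy: {0:P}",
                          output.Average(o => o.CalculateError(errorMetric)));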
Example No. 22
        static void ReberPrediction()
        {
            // generate 500 extended reber grammar training examples
            var grammar   = new ReberGrammar();
            var sequences = grammar.GetExtended(10, 16).Take(500).ToList();

            // split the data into training and test sets
            var data = ReberGrammar.GetOneHot(sequences).Split(0);

            using var lap = BrightWireProvider.CreateLinearAlgebra();
            var graph = new GraphFactory(lap);

            // binary classification rounds each output to either 0 or 1
            var errorMetric = graph.ErrorMetric.BinaryClassification;

            // configure the network properties
            graph.CurrentPropertySet
                .Use(graph.GradientDescent.RmsProp)
                .Use(graph.WeightInitialisation.Xavier);

            // create the engine
            var trainingData = graph.CreateDataSource(data.Training);
            var testData     = trainingData.CloneWith(data.Test);
            var engine       = graph.CreateTrainingEngine(trainingData, learningRate: 0.1f, batchSize: 32);

            // build the network
            const int HIDDEN_LAYER_SIZE = 32, TRAINING_ITERATIONS = 30;

            graph.Connect(engine)
                .AddGru(HIDDEN_LAYER_SIZE)
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.SigmoidActivation())
                .AddBackpropagationThroughTime(errorMetric);
            engine.Train(TRAINING_ITERATIONS, testData, errorMetric);

            // generate a sample sequence using the learned state transitions
            var networkGraph    = engine.Graph;
            var executionEngine = graph.CreateEngine(networkGraph);

            Console.WriteLine("Generating new reber sequences from the observed state probabilities...");
            for (var z = 0; z < 3; z++)
            {
                // prepare the first input
                var input = new float[ReberGrammar.Size];
                input[ReberGrammar.GetIndex('B')] = 1f;
                Console.Write("B");
                int index = 0, eCount = 0;
                using var executionContext = graph.CreateExecutionContext();
                var result = executionEngine.ExecuteSequential(index++, input, executionContext,
                                                               MiniBatchSequenceType.SequenceStart);
                for (var i = 0; i < 32; i++)
                {
                    var next = result.Output[0].Data
                        .Select((v, j) => ((double)v, j))
                        .Where(d => d.Item1 >= 0.1f)
                        .ToList();
                    // guard: Categorical needs a non-empty probability mass
                    if (!next.Any()) break;
                    var distribution = new Categorical(next.Select(d => d.Item1).ToArray());
                    var nextIndex    = next[distribution.Sample()].Item2;
                    Console.Write(ReberGrammar.GetChar(nextIndex));
                    if (nextIndex == ReberGrammar.GetIndex('E') && ++eCount == 2)
                    {
                        break;
                    }
                    Array.Clear(input, 0, ReberGrammar.Size);
                    input[nextIndex] = 1f;
                    result           = executionEngine.ExecuteSequential(index++, input, executionContext,
                                                                         MiniBatchSequenceType.Standard);
                }

                Console.WriteLine();
            }
        }
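The generation loop above samples the next character from a MathNet.Numerics Categorical distribution built over the outputs that clear a 0.1 threshold; Categorical accepts an unnormalised probability mass and normalises it internally. The same step, isolated as a hypothetical helper:

        static int SampleNextIndex(float[] output, float threshold = 0.1f)
        {
            // keep the outputs above the threshold, remembering their original indices
            var candidates = output
                .Select((v, j) => ((double)v, j))
                .Where(d => d.Item1 >= threshold)
                .ToList();

            // sample an index in proportion to the (unnormalised) output probabilities
            var distribution = new Categorical(candidates.Select(d => d.Item1).ToArray());
            return candidates[distribution.Sample()].Item2;
        }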
Example No. 23
        /// <summary>
        /// Trains a feed forward neural net on the MNIST data set (handwritten digit recognition)
        /// The data files can be downloaded from http://yann.lecun.com/exdb/mnist/
        /// </summary>
        /// <param name="dataFilesPath">The path to a directory with the four extracted data files</param>
        /// <param name="outputModelPath">Optional path to save the best model to</param>
        static void MNISTConvolutional(string dataFilesPath, string outputModelPath = null)
        {
            using var lap = BrightWireGpuProvider.CreateLinearAlgebra();
            var graph = new GraphFactory(lap);

            Console.Write("Loading training data...");
            var mnistTraining = Mnist.Load(dataFilesPath + "train-labels.idx1-ubyte",
                                           dataFilesPath + "train-images.idx3-ubyte");
            var mnistTest = Mnist.Load(dataFilesPath + "t10k-labels.idx1-ubyte",
                                       dataFilesPath + "t10k-images.idx3-ubyte");
            var trainingData =
                _BuildTensors(graph, null, mnistTraining /*.Where(d => d.Label < 2).ToList()*/);
            var testData = _BuildTensors(graph, trainingData,
                                         mnistTest /*.Where(d => d.Label < 2).ToList()*/);

            Console.WriteLine(
                $"done - {trainingData.RowCount} training images and {testData.RowCount} test images loaded");

            // one hot encoding uses the index of the output vector's maximum value as the classification label
            var errorMetric = graph.ErrorMetric.OneHotEncoding;

            // configure the network properties
            graph.CurrentPropertySet
                .Use(graph.GradientDescent.Adam)
                .Use(graph.GaussianWeightInitialisation(false, 0.1f, GaussianVarianceCalibration.SquareRoot2N));

            // create the network
            const int   HIDDEN_LAYER_SIZE = 1024, TRAINING_ITERATIONS = 20;
            const float LEARNING_RATE = 0.05f;
            var         engine        = graph.CreateTrainingEngine(trainingData, LEARNING_RATE);

            if (!string.IsNullOrWhiteSpace(outputModelPath) && File.Exists(outputModelPath))
            {
                Console.WriteLine("Loading existing model from: " + outputModelPath);
                using var file = new FileStream(outputModelPath, FileMode.Open, FileAccess.Read);
                var model = Serializer.Deserialize <GraphModel>(file);
                engine = graph.CreateTrainingEngine(trainingData, model.Graph, LEARNING_RATE);
            }
            else
            {
                graph.Connect(engine)
                    .AddConvolutional(filterCount: 16, padding: 2, filterWidth: 5, filterHeight: 5,
                                      xStride: 1, yStride: 1, shouldBackpropagate: false)
                    .Add(graph.LeakyReluActivation())
                    .AddMaxPooling(filterWidth: 2, filterHeight: 2, xStride: 2, yStride: 2)
                    .AddConvolutional(filterCount: 32, padding: 2, filterWidth: 5, filterHeight: 5,
                                      xStride: 1, yStride: 1)
                    .Add(graph.LeakyReluActivation())
                    .AddMaxPooling(filterWidth: 2, filterHeight: 2, xStride: 2, yStride: 2)
                    .Transpose()
                    .AddFeedForward(HIDDEN_LAYER_SIZE)
                    .Add(graph.LeakyReluActivation())
                    .AddDropOut(dropOutPercentage: 0.5f)
                    .AddFeedForward(trainingData.OutputSize)
                    .Add(graph.SoftMaxActivation())
                    .AddBackpropagation(errorMetric);
            }

            // lower the learning rate over time
            engine.LearningContext.ScheduleLearningRate(15, LEARNING_RATE / 2);

            // train the network for twenty iterations, saving the model on each improvement
            Models.ExecutionGraph bestGraph = null;
            engine.Train(TRAINING_ITERATIONS, testData, errorMetric, model =>
            {
                bestGraph = model.Graph;
                if (!string.IsNullOrWhiteSpace(outputModelPath))
                {
                    using var file = new FileStream(outputModelPath, FileMode.Create, FileAccess.Write);
                    Serializer.Serialize(file, model);
                }
            });

            // export the best model and execute it on the test set
            var executionEngine = graph.CreateEngine(bestGraph ?? engine.Graph);
            var output          = executionEngine.Execute(testData);

            Console.WriteLine($"Final accuracy: {output.Average(o => o.CalculateError(errorMetric)):P2}");

            // execute the model with a single image and show its predicted digit
            var tensor     = mnistTest.First().AsFloatTensor.Tensor;
            var singleData = graph.CreateDataSource(new[] { tensor });
            var result     = executionEngine.Execute(singleData);
            var prediction = result.Single().Output.Single().MaximumIndex();
            Console.WriteLine($"Predicted {prediction} (expected {mnistTest.First().Label})");
        }
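_BuildTensors is defined elsewhere in the sample; a plausible sketch (the exact signature and the AsFloatTensor members are assumptions) that converts the MNIST images into a tensor data source, reusing an existing data source's vectorisation so that training and test data agree:

        static IDataSource _BuildTensors(GraphFactory graph, IDataSource existing,
                                         IReadOnlyList<Mnist.Image> images)
        {
            // build a data table with a tensor input column and a one hot encoded target
            var dataTableBuilder = BrightWireProvider.CreateDataTableBuilder();
            dataTableBuilder.AddColumn(ColumnType.Tensor, "Image");
            dataTableBuilder.AddColumn(ColumnType.Vector, "Target", isTarget: true);
            foreach (var image in images) {
                var floatTensor = image.AsFloatTensor;
                dataTableBuilder.Add(floatTensor.Tensor, floatTensor.Label);
            }
            var dataTable = dataTableBuilder.Build();
            return existing != null ? existing.CloneWith(dataTable) : graph.CreateDataSource(dataTable);
        }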
Example No. 24
        public static void IntegerAddition()
        {
            // generate 1000 random integer additions (split into training and test sets)
            var data = BinaryIntegers.Addition(1000, false).Split(0);

            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.BinaryClassification;

                // modify the property set
                var propertySet = graph.CurrentPropertySet
                                  .Use(graph.GradientDescent.Adam)
                                  .Use(graph.WeightInitialisation.Xavier)
                ;

                // create the engine
                var trainingData = graph.CreateDataSource(data.Training);
                var testData     = trainingData.CloneWith(data.Test);
                var engine       = graph.CreateTrainingEngine(trainingData, 0.01f, 16);

                // build the network
                const int HIDDEN_LAYER_SIZE = 32;
                var       memory            = new float[HIDDEN_LAYER_SIZE];
                var       network           = graph.Connect(engine)
                                              .AddSimpleRecurrent(graph.ReluActivation(), memory)
                                              .AddFeedForward(engine.DataSource.OutputSize)
                                              .Add(graph.ReluActivation())
                                              .AddBackpropagationThroughTime(errorMetric)
                ;

                // train the network
                GraphModel bestNetwork = null;
                engine.Train(30, testData, errorMetric, bn => bestNetwork = bn);

                // export the graph and verify it against some unseen integers
                var executionEngine = graph.CreateEngine(bestNetwork?.Graph ?? engine.Graph);
                var testData2       = graph.CreateDataSource(BinaryIntegers.Addition(8, true));
                var results         = executionEngine.Execute(testData2);

                // group the output by example (each of the 32 results holds one sequence step for all 8 test additions)
                var groupedResults = new Tuple<FloatVector[], FloatVector[], FloatVector[]>[8];
                for (var i = 0; i < 8; i++)
                {
                    var input  = new FloatVector[32];
                    var target = new FloatVector[32];
                    var output = new FloatVector[32];
                    for (var j = 0; j < 32; j++)
                    {
                        input[j]  = results[j].Input[0][i];
                        target[j] = results[j].Target[i];
                        output[j] = results[j].Output[i];
                    }
                    groupedResults[i] = Tuple.Create(input, target, output);
                }

                // write the results
                foreach (var result in groupedResults)
                {
                    Console.Write("First:     ");
                    foreach (var item in result.Item1)
                    {
                        _WriteBinary(item.Data[0]);
                    }
                    Console.WriteLine();

                    Console.Write("Second:    ");
                    foreach (var item in result.Item1)
                    {
                        _WriteBinary(item.Data[1]);
                    }
                    Console.WriteLine();
                    Console.WriteLine("           --------------------------------");

                    Console.Write("Expected:  ");
                    foreach (var item in result.Item2)
                    {
                        _WriteBinary(item.Data[0]);
                    }
                    Console.WriteLine();

                    Console.Write("Predicted: ");
                    foreach (var item in result.Item3)
                    {
                        _WriteBinary(item.Data[0]);
                    }
                    Console.WriteLine();
                    Console.WriteLine();
                }
            }
        }
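_WriteBinary is a small helper not shown here; a minimal sketch that rounds each activation to a printed bit:

        static void _WriteBinary(float value)
        {
            Console.Write(value >= 0.5f ? "1" : "0");
        }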
Example No. 25
        /// <summary>
        /// Trains a feed forward neural net on the emotion dataset
        /// http://lpis.csd.auth.gr/publications/tsoumakas-ismir08.pdf
        /// The data files can be downloaded from https://downloads.sourceforge.net/project/mulan/datasets/emotions.rar
        /// </summary>
        /// <param name="dataFilePath"></param>
        public static void MultiLabelSingleClassifier(string dataFilePath)
        {
            var emotionData           = _LoadEmotionData(dataFilePath);
            var attributeColumns      = Enumerable.Range(0, emotionData.ColumnCount - CLASSIFICATION_COUNT).ToList();
            var classificationColumns = Enumerable.Range(emotionData.ColumnCount - CLASSIFICATION_COUNT, CLASSIFICATION_COUNT).ToList();

            // create a new data table with a vector input column and a vector output column
            var dataTableBuilder = BrightWireProvider.CreateDataTableBuilder();

            dataTableBuilder.AddColumn(ColumnType.Vector, "Attributes");
            dataTableBuilder.AddColumn(ColumnType.Vector, "Target", isTarget: true);
            emotionData.ForEach(row => {
                var input  = FloatVector.Create(row.GetFields<float>(attributeColumns).ToArray());
                var target = FloatVector.Create(row.GetFields<float>(classificationColumns).ToArray());
                dataTableBuilder.Add(input, target);
                return true;
            });
            var data = dataTableBuilder.Build().Split(0);

            // train a neural network
            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph = new GraphFactory(lap);

                // binary classification rounds each output to 0 or 1 and compares each output against the binary classification targets
                var errorMetric = graph.ErrorMetric.BinaryClassification;

                // configure the network properties
                graph.CurrentPropertySet
                .Use(graph.GradientDescent.Adam)
                .Use(graph.WeightInitialisation.Xavier)
                ;

                // create a training engine
                const float TRAINING_RATE = 0.3f;
                var         trainingData  = graph.CreateDataSource(data.Training);
                var         testData      = trainingData.CloneWith(data.Test);
                var         engine        = graph.CreateTrainingEngine(trainingData, TRAINING_RATE, 128);

                // build the network
                const int HIDDEN_LAYER_SIZE = 64, TRAINING_ITERATIONS = 2000;
                var       network = graph.Connect(engine)
                                    .AddFeedForward(HIDDEN_LAYER_SIZE)
                                    .Add(graph.SigmoidActivation())
                                    .AddDropOut(dropOutPercentage: 0.5f)
                                    .AddFeedForward(engine.DataSource.OutputSize)
                                    .Add(graph.SigmoidActivation())
                                    .AddBackpropagation(errorMetric)
                ;

                // train the network
                Models.ExecutionGraph bestGraph = null;
                engine.Train(TRAINING_ITERATIONS, testData, errorMetric, model => bestGraph = model.Graph, 50);

                // export the best model and execute it on the test set
                var executionEngine = graph.CreateEngine(bestGraph ?? engine.Graph);
                var output          = executionEngine.Execute(testData);

                // output the results
                var rowIndex = 0;
                foreach (var item in output)
                {
                    var sb = new StringBuilder();
                    foreach (var classification in item.Output.Zip(item.Target, (o, t) => (Output: o, Target: t)))
                    {
                        var columnIndex = 0;
                        sb.AppendLine($"{rowIndex++}) ");
                        foreach (var column in classification.Output.Data.Zip(classification.Target.Data,
                                                                              (o, t) => (Output: o, Target: t)))
                        {
                            var prediction = column.Output >= 0.5f ? "true" : "false";
                            var actual     = column.Target >= 0.5f ? "true" : "false";
                            sb.AppendLine($"\t{columnIndex++}) predicted {prediction} (expected {actual})");
                        }
                    }
                    Console.WriteLine(sb.ToString());
                }
            }
        }
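CLASSIFICATION_COUNT and _LoadEmotionData are defined elsewhere in the sample. The emotions data set has six binary labels, so a plausible sketch (assuming the downloaded .arff file has been converted to a headerless CSV) is:

        const int CLASSIFICATION_COUNT = 6;

        static IDataTable _LoadEmotionData(string dataFilePath)
        {
            // parse the comma separated values into a data table (all columns are numeric)
            using (var reader = new StreamReader(dataFilePath))
                return reader.ParseCSV(',');
        }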
Example No. 26
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            var sb = new StringBuilder();

            using (var lap = BrightWireProvider.CreateLinearAlgebra()) {
                // Create some training data that the network will learn.  The XOR pattern looks like:
                // 0 0 => 0
                // 1 0 => 1
                // 0 1 => 1
                // 1 1 => 0
                var data = Xor.Get();

                // create the graph
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.CrossEntropy;
                graph.CurrentPropertySet
                // use rmsprop gradient descent optimisation
                .Use(graph.GradientDescent.RmsProp)

                // and gaussian weight initialisation
                .Use(graph.WeightInitialisation.Gaussian)
                ;

                // create the engine
                var testData = graph.CreateDataSource(data);
                var engine   = graph.CreateTrainingEngine(testData, 0.1f, 4);

                // create the network
                const int HIDDEN_LAYER_SIZE = 6;
                graph.Connect(engine)
                // create a feed forward layer with sigmoid activation
                .AddFeedForward(HIDDEN_LAYER_SIZE)
                .Add(graph.SigmoidActivation())

                // create a second feed forward layer with sigmoid activation
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.SigmoidActivation())

                // backpropagate the error signal at the end of the graph
                .AddBackpropagation(errorMetric)
                ;

                // train the network
                var executionContext = graph.CreateExecutionContext();
                for (var i = 0; i < 1000; i++)
                {
                    var trainingError = engine.Train(executionContext);
                    if (i % 100 == 0)
                    {
                        engine.Test(testData, errorMetric);
                    }
                }
                engine.Test(testData, errorMetric);

                // create a new network to execute the learned network
                var networkGraph    = engine.Graph;
                var executionEngine = graph.CreateEngine(networkGraph);
                var output          = executionEngine.Execute(testData);

                // print the learnt values
                foreach (var item in output)
                {
                    foreach (var index in item.MiniBatchSequence.MiniBatch.Rows)
                    {
                        var row    = data.GetRow(index);
                        var result = item.Output[index];
                        sb.AppendLine($"{row.GetField<int>(0)} XOR {row.GetField<int>(1)} = {result.Data[0]}");
                    }
                }
            }

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);

            var outputText = FindViewById <TextView>(Resource.Id.outputText);

            outputText.Text = sb.ToString();
        }
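Xor.Get() is a Bright Wire helper that returns the four XOR rows as a data table; conceptually it builds something like the following (a sketch, not the library source):

        static IDataTable BuildXorTable()
        {
            var builder = BrightWireProvider.CreateDataTableBuilder();
            builder.AddColumn(ColumnType.Float, "X");
            builder.AddColumn(ColumnType.Float, "Y");
            builder.AddColumn(ColumnType.Float, "XOR", isTarget: true);
            builder.Add(0f, 0f, 0f);
            builder.Add(1f, 0f, 1f);
            builder.Add(0f, 1f, 1f);
            builder.Add(1f, 1f, 0f);
            return builder.Build();
        }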
Example No. 27
        static void ReberPrediction()
        {
            var grammar   = new ReberGrammar(false);
            var sequences = grammar.GetExtended(10).Take(500).ToList();
            var data      = ReberGrammar.GetOneHot(sequences).Split(0);

            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.BinaryClassification;
                var propertySet = graph.CurrentPropertySet
                                  .Use(graph.GradientDescent.RmsProp)
                                  .Use(graph.WeightInitialisation.Xavier)
                ;

                // create the engine
                var trainingData = graph.CreateDataSource(data.Training);
                var testData     = trainingData.CloneWith(data.Test);
                var engine       = graph.CreateTrainingEngine(trainingData, 0.1f, 32);

                // build the network
                const int HIDDEN_LAYER_SIZE = 64;
                var       memory            = new float[HIDDEN_LAYER_SIZE];
                var       network           = graph.Connect(engine)
                                              //.AddSimpleRecurrent(graph.ReluActivation(), memory)
                                              .AddGru(memory)
                                              //.AddRan(memory)
                                              //.AddLstm(memory)

                                              .AddFeedForward(engine.DataSource.OutputSize)
                                              .Add(graph.TanhActivation())
                                              .AddBackpropagationThroughTime(errorMetric)
                ;

                engine.Train(30, testData, errorMetric);

                // generate sample sequence
                var networkGraph = engine.Graph;
                using (var executionContext = graph.CreateExecutionContext()) {
                    var executionEngine = graph.CreateEngine(networkGraph);

                    Console.WriteLine("Generating a new reber sequence...");
                    var input = new float[ReberGrammar.Size];
                    input[ReberGrammar.GetIndex('B')] = 1f;
                    Console.Write("B");

                    int index = 0, eCount = 0;
                    var result = executionEngine.ExecuteSequential(index++, input, executionContext, MiniBatchSequenceType.SequenceStart);
                    for (var i = 0; i < 32; i++)
                    {
                        // collect the outputs above the threshold as candidate transitions
                        var candidates = result.Output[0].Data
                                         .Select((v, j) => (v, j))
                                         .Where(d => d.Item1 >= 0.5f)
                                         .Select(d => d.Item2)
                                         .ToList();

                        // stop if the network did not predict any next character
                        if (!candidates.Any())
                        {
                            break;
                        }
                        var nextIndex = candidates.Shuffle().First();

                        Console.Write(ReberGrammar.GetChar(nextIndex));
                        if (nextIndex == ReberGrammar.GetIndex('E') && ++eCount == 2)
                        {
                            break;
                        }

                        input            = new float[ReberGrammar.Size];
                        input[nextIndex] = 1f;
                        result           = executionEngine.ExecuteSequential(index++, input, executionContext, MiniBatchSequenceType.Standard);
                    }
                    Console.WriteLine();
                }
            }
        }