public void TiedAutoEncoder()
        {
            const int DATA_SIZE = 1000, REDUCED_SIZE = 200;

            // create some random data (an autoencoder's target is its own input,
            // so each vector is added as both input and output)
            var rand    = new Random();
            var builder = BrightWireProvider.CreateDataTableBuilder();

            builder.AddVectorColumn(DATA_SIZE, "Input");
            builder.AddVectorColumn(DATA_SIZE, "Output", true);
            for (var i = 0; i < 100; i++)
            {
                var vector = new FloatVector {
                    Data = Enumerable.Range(0, DATA_SIZE).Select(j => Convert.ToSingle(rand.NextDouble())).ToArray()
                };
                builder.Add(vector, vector);
            }
            var dataTable = builder.Build();

            // build the autoencoder with tied weights
            var graph       = new GraphFactory(_lap);
            var dataSource  = graph.CreateDataSource(dataTable);
            var engine      = graph.CreateTrainingEngine(dataSource, 0.03f, 32);
            var errorMetric = graph.ErrorMetric.Quadratic;

            graph.CurrentPropertySet
            .Use(graph.RmsProp())
            .Use(graph.WeightInitialisation.Xavier)
            ;

            // encoder: DATA_SIZE -> REDUCED_SIZE, named so the decoder can share its weights
            var network = graph.Connect(engine)
            .AddFeedForward(REDUCED_SIZE, "layer")
            .Add(graph.TanhActivation())
            ;

            // BUGFIX: the original passed "FindByName(...) as IFeedForward" straight into
            // AddTiedFeedForward, so a failed lookup/cast surfaced as a NullReferenceException
            // deep inside the library; fail fast with a clear message instead
            var encoder = engine.Start.FindByName("layer") as IFeedForward
                ?? throw new InvalidOperationException("Node \"layer\" was not found or is not a feed forward layer");

            // decoder: REDUCED_SIZE -> DATA_SIZE using the tied (transposed) encoder weights
            network
            .AddTiedFeedForward(encoder)
            .Add(graph.TanhActivation())
            .AddBackpropagation(errorMetric)
            ;

            // train for two epochs (the per-epoch error return value is not needed here)
            using (var executionContext = graph.CreateExecutionContext()) {
                for (var i = 0; i < 2; i++)
                {
                    engine.Train(executionContext);
                }
            }

            // execute the trained network against the first row of the data table
            var networkGraph    = engine.Graph;
            var executionEngine = graph.CreateEngine(networkGraph);
            var results         = executionEngine.Execute(dataTable.GetRow(0).GetField <FloatVector>(0).Data);
        }
// Exemple #2
// 0
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            // collects the learnt XOR values so they can be shown in the activity's TextView
            var sb = new StringBuilder();

            using (var lap = BrightWireProvider.CreateLinearAlgebra()) {
                // Create some training data that the network will learn.  The XOR pattern looks like:
                // 0 0 => 0
                // 1 0 => 1
                // 0 1 => 1
                // 1 1 => 0
                var data = Xor.Get();

                // create the graph
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.CrossEntropy;
                graph.CurrentPropertySet
                // use rmsprop gradient descent optimisation
                .Use(graph.GradientDescent.RmsProp)

                // and gaussian weight initialisation
                .Use(graph.WeightInitialisation.Gaussian)
                ;

                // create the engine (XOR is tiny, so the training data doubles as test data)
                var testData = graph.CreateDataSource(data);
                var engine   = graph.CreateTrainingEngine(testData, 0.1f, 4);

                // create the network
                const int HIDDEN_LAYER_SIZE = 6;
                graph.Connect(engine)
                // create a feed forward layer with sigmoid activation
                .AddFeedForward(HIDDEN_LAYER_SIZE)
                .Add(graph.SigmoidActivation())

                // create a second feed forward layer with sigmoid activation
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.SigmoidActivation())

                // backpropagate the error signal at the end of the graph
                .AddBackpropagation(errorMetric)
                ;

                // train the network; the execution context is disposable and was
                // previously leaked, so wrap it in a using block
                using (var executionContext = graph.CreateExecutionContext()) {
                    for (var i = 0; i < 1000; i++)
                    {
                        engine.Train(executionContext);
                        if (i % 100 == 0)
                        {
                            engine.Test(testData, errorMetric);
                        }
                    }
                    engine.Test(testData, errorMetric);
                }

                // create a new network to execute the learned network
                var networkGraph    = engine.Graph;
                var executionEngine = graph.CreateEngine(networkGraph);
                var output          = executionEngine.Execute(testData);

                // print the learnt values
                foreach (var item in output)
                {
                    foreach (var index in item.MiniBatchSequence.MiniBatch.Rows)
                    {
                        var row    = data.GetRow(index);
                        var result = item.Output[index];
                        sb.AppendLine($"{row.GetField<int>(0)} XOR {row.GetField<int>(1)} = {result.Data[0]}");
                    }
                }
            }

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);

            var outputText = FindViewById <TextView>(Resource.Id.outputText);

            outputText.Text = sb.ToString();
        }
// Exemple #3
// 0
        public static void XOR()
        {
            using (var lap = BrightWireProvider.CreateLinearAlgebra()) {
                // Create some training data that the network will learn.  The XOR pattern looks like:
                // 0 0 => 0
                // 1 0 => 1
                // 0 1 => 1
                // 1 1 => 0
                var data = Xor.Get();

                // create the graph
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.CrossEntropy;
                graph.CurrentPropertySet
                // use rmsprop gradient descent optimisation
                .Use(graph.GradientDescent.RmsProp)

                // and gaussian weight initialisation
                .Use(graph.WeightInitialisation.Gaussian)
                ;

                // create the engine (XOR is tiny, so the training data doubles as test data)
                var testData = graph.CreateDataSource(data);
                var engine   = graph.CreateTrainingEngine(testData, 0.1f, 4);

                // create the network
                const int HIDDEN_LAYER_SIZE = 6;
                graph.Connect(engine)
                // create a feed forward layer with sigmoid activation
                .AddFeedForward(HIDDEN_LAYER_SIZE)
                .Add(graph.SigmoidActivation())

                // create a second feed forward layer with sigmoid activation
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.SigmoidActivation())

                // backpropagate the error signal at the end of the graph
                .AddBackpropagation(errorMetric)
                ;

                // train the network; the execution context is disposable and was
                // previously leaked, so wrap it in a using block
                using (var executionContext = graph.CreateExecutionContext()) {
                    for (var i = 0; i < 1000; i++)
                    {
                        engine.Train(executionContext);
                        if (i % 100 == 0)
                        {
                            engine.Test(testData, errorMetric);
                        }
                    }
                    engine.Test(testData, errorMetric);
                }

                // create a new network to execute the learned network
                var networkGraph    = engine.Graph;
                var executionEngine = graph.CreateEngine(networkGraph);
                var output          = executionEngine.Execute(testData);
                Console.WriteLine(output.Average(o => o.CalculateError(errorMetric)));

                // print the learnt values
                foreach (var item in output)
                {
                    foreach (var index in item.MiniBatchSequence.MiniBatch.Rows)
                    {
                        var row    = data.GetRow(index);
                        var result = item.Output[index];
                        Console.WriteLine($"{row.GetField<int>(0)} XOR {row.GetField<int>(1)} = {result.Data[0]}");
                    }
                }
            }
        }
        static void ReberPrediction()
        {
            // build a corpus of 500 extended reber grammar strings (length 10..16)
            var grammar = new ReberGrammar();
            var corpus  = grammar.GetExtended(10, 16).Take(500).ToList();

            // one-hot encode the sequences and split them into training and test sets
            var split = ReberGrammar.GetOneHot(corpus).Split(0);

            using var lap = BrightWireProvider.CreateLinearAlgebra();
            var graph = new GraphFactory(lap);

            // each output is rounded to either 0 or 1, so binary classification is the error metric
            var errorMetric = graph.ErrorMetric.BinaryClassification;

            // configure gradient descent optimisation and weight initialisation
            graph.CurrentPropertySet
                .Use(graph.GradientDescent.RmsProp)
                .Use(graph.WeightInitialisation.Xavier);

            // create the training engine
            var trainingData = graph.CreateDataSource(split.Training);
            var testData     = trainingData.CloneWith(split.Test);
            var engine       = graph.CreateTrainingEngine(trainingData, learningRate: 0.1f, batchSize: 32);

            // build the network: GRU -> feed forward -> sigmoid, trained with backpropagation through time
            const int HIDDEN_LAYER_SIZE = 32, TRAINING_ITERATIONS = 30;

            graph.Connect(engine)
                .AddGru(HIDDEN_LAYER_SIZE)
                .AddFeedForward(engine.DataSource.OutputSize)
                .Add(graph.SigmoidActivation())
                .AddBackpropagationThroughTime(errorMetric);
            engine.Train(TRAINING_ITERATIONS, testData, errorMetric);

            // generate sample sequences using the learned state transitions
            var trainedGraph    = engine.Graph;
            var executionEngine = graph.CreateEngine(trainedGraph);

            Console.WriteLine("Generating new reber sequences from the observed state probabilities...");
            for (var sample = 0; sample < 3; sample++)
            {
                // every reber string starts with 'B'
                var input = new float[ReberGrammar.Size];
                input[ReberGrammar.GetIndex('B')] = 1f;
                Console.Write("B");
                int position = 0, endCount = 0;
                using var executionContext = graph.CreateExecutionContext();
                var result = executionEngine.ExecuteSequential(position++, input, executionContext,
                                                               MiniBatchSequenceType.SequenceStart);
                for (var step = 0; step < 32; step++)
                {
                    // keep every character whose predicted probability is at least 0.1
                    var candidates = result.Output[0].Data
                                     .Select((probability, charIndex) => ((double)probability, charIndex))
                                     .Where(candidate => candidate.Item1 >= 0.1f)
                                     .ToList();

                    // sample the next character in proportion to the predicted probabilities
                    var distribution = new Categorical(candidates.Select(candidate => candidate.Item1).ToArray());
                    var nextIndex    = candidates[distribution.Sample()].Item2;
                    Console.Write(ReberGrammar.GetChar(nextIndex));

                    // an extended reber string finishes on its second 'E'
                    if (nextIndex == ReberGrammar.GetIndex('E') && ++endCount == 2)
                    {
                        break;
                    }

                    // feed the sampled character back in as the next one-hot input
                    Array.Clear(input, 0, ReberGrammar.Size);
                    input[nextIndex] = 1f;
                    result = executionEngine.ExecuteSequential(position++, input, executionContext,
                                                               MiniBatchSequenceType.Standard);
                }

                Console.WriteLine();
            }
        }
// Exemple #5
// 0
        static void ReberPrediction()
        {
            // generate 500 extended reber grammar sequences, one-hot encode them and
            // split into training/test sets
            var grammar   = new ReberGrammar(false);
            var sequences = grammar.GetExtended(10).Take(500).ToList();
            var data      = ReberGrammar.GetOneHot(sequences).Split(0);

            using (var lap = BrightWireProvider.CreateLinearAlgebra(false)) {
                var graph       = new GraphFactory(lap);
                var errorMetric = graph.ErrorMetric.BinaryClassification;
                graph.CurrentPropertySet
                    .Use(graph.GradientDescent.RmsProp)
                    .Use(graph.WeightInitialisation.Xavier)
                ;

                // create the engine
                var trainingData = graph.CreateDataSource(data.Training);
                var testData     = trainingData.CloneWith(data.Test);
                var engine       = graph.CreateTrainingEngine(trainingData, 0.1f, 32);

                // build the network (other recurrent cells can be swapped in for the GRU:
                // AddSimpleRecurrent, AddRan and AddLstm all accept the same memory buffer)
                const int HIDDEN_LAYER_SIZE = 64;
                var       memory            = new float[HIDDEN_LAYER_SIZE];
                graph.Connect(engine)
                    .AddGru(memory)
                    .AddFeedForward(engine.DataSource.OutputSize)
                    .Add(graph.TanhActivation())
                    .AddBackpropagationThroughTime(errorMetric)
                ;

                engine.Train(30, testData, errorMetric);

                // generate a sample sequence from the trained network
                var networkGraph = engine.Graph;
                using (var executionContext = graph.CreateExecutionContext()) {
                    var executionEngine = graph.CreateEngine(networkGraph);

                    Console.WriteLine("Generating a new reber sequence...");
                    var input = new float[ReberGrammar.Size];
                    input[ReberGrammar.GetIndex('B')] = 1f;
                    Console.Write("B");

                    int index = 0, eCount = 0;
                    var result = executionEngine.ExecuteSequential(index++, input, executionContext, MiniBatchSequenceType.SequenceStart);
                    for (var i = 0; i < 32; i++)
                    {
                        // collect the characters whose output probability crossed the threshold
                        var candidates = result.Output[0].Data
                                         .Select((v, j) => (v, j))
                                         .Where(d => d.Item1 >= 0.5f)
                                         .Select(d => d.Item2)
                                         .ToList();

                        // BUGFIX: the original checked "index == 0" — but index is the sequence
                        // position, which is always >= 1 here after the preceding index++ — so an
                        // empty candidate set silently fell through FirstOrDefault() to 0 and
                        // emitted the character at index 0. Stop when there is no prediction.
                        if (candidates.Count == 0)
                        {
                            break;
                        }
                        var nextIndex = candidates.Shuffle().First();

                        Console.Write(ReberGrammar.GetChar(nextIndex));

                        // an extended reber string finishes on its second 'E'
                        if (nextIndex == ReberGrammar.GetIndex('E') && ++eCount == 2)
                        {
                            break;
                        }

                        // feed the chosen character back in as the next one-hot input
                        input            = new float[ReberGrammar.Size];
                        input[nextIndex] = 1f;
                        result           = executionEngine.ExecuteSequential(index++, input, executionContext, MiniBatchSequenceType.Standard);
                    }
                    Console.WriteLine();
                }
            }
        }