Example #1
0
        /// <summary>
        /// Trains a bidirectional recurrent network on the binary-addition dataset
        /// (one simple recurrent layer in each direction, joined by a feed-forward
        /// output layer), then replays the training sequences through the trained network.
        /// </summary>
        public void BidirectionalAddition()
        {
            var trainingSet = BinaryIntegers.Addition(10, false).Select(l => l.ToArray()).ToList();

            const int HIDDEN_SIZE = 16, NUM_EPOCHS = 100, BATCH_SIZE = 32;
            var       errorMetric = ErrorMetricType.BinaryClassification.Create();

            var layerTemplate = new LayerDescriptor(0.1f)
            {
                Activation           = ActivationType.LeakyRelu,
                WeightInitialisation = WeightInitialisationType.Gaussian,
                DecayRate            = 0.99f
            };

            // Cloned so the recurrent layers can diverge from the output layer's
            // descriptor later without affecting layerTemplate. (The clone already
            // carries Gaussian initialisation, so no re-assignment is needed.)
            var recurrentTemplate = layerTemplate.Clone();

            var trainingDataProvider = _lap.NN.CreateSequentialTrainingDataProvider(trainingSet);
            var layers = new INeuralNetworkBidirectionalLayer[] {
                _lap.NN.CreateBidirectionalLayer(
                    _lap.NN.CreateSimpleRecurrentLayer(trainingDataProvider.InputSize, HIDDEN_SIZE, recurrentTemplate),
                    _lap.NN.CreateSimpleRecurrentLayer(trainingDataProvider.InputSize, HIDDEN_SIZE, recurrentTemplate)
                    ),
                // HIDDEN_SIZE * 2: the output layer consumes the concatenated
                // forward + backward hidden states
                _lap.NN.CreateBidirectionalLayer(_lap.NN.CreateFeedForwardRecurrentLayer(HIDDEN_SIZE * 2, trainingDataProvider.OutputSize, layerTemplate))
            };
            BidirectionalNetwork networkData = null;

            using (var trainer = _lap.NN.CreateBidirectionalBatchTrainer(layers)) {
                // Initial hidden state for each direction; C# arrays are zero-initialised
                var forwardMemory   = new float[HIDDEN_SIZE];
                var backwardMemory  = new float[HIDDEN_SIZE];
                var trainingContext = _lap.NN.CreateTrainingContext(errorMetric, 0.1f, BATCH_SIZE);
                trainingContext.RecurrentEpochComplete += (tc, rtc) => {
                    Debug.WriteLine(tc.LastTrainingError);
                };
                trainer.Train(trainingDataProvider, forwardMemory, backwardMemory, NUM_EPOCHS, _lap.NN.CreateRecurrentTrainingContext(trainingContext));
                networkData = trainer.NetworkInfo;
                // Persist the (trained) initial memory buffers alongside the weights
                networkData.ForwardMemory = new FloatArray {
                    Data = forwardMemory
                };
                networkData.BackwardMemory = new FloatArray {
                    Data = backwardMemory
                };
            }

            var network = _lap.NN.CreateBidirectional(networkData);

            // Smoke-test: execute each training sequence through the trained network
            // (output is intentionally discarded)
            foreach (var sequence in trainingSet)
            {
                network.Execute(sequence.Select(d => d.Input).ToList());
            }
        }
        /// <summary>
        /// Builds an executable bidirectional network from its serialised model:
        /// each serialised layer becomes a (forward, backward) execution pair,
        /// where either side may be null if that direction is absent.
        /// </summary>
        /// <param name="network">The serialised bidirectional network (layers, memory buffers, padding)</param>
        /// <returns>An execution wrapper ready to process input sequences</returns>
        public IBidirectionalRecurrentExecution CreateBidirectional(BidirectionalNetwork network)
        {
            var executionPairs = new List<Tuple<IRecurrentLayerExecution, IRecurrentLayerExecution>>();

            foreach (var layerInfo in network.Layer) {
                var forward  = layerInfo.Forward  != null ? _GetRecurrentExecution(layerInfo.Forward)  : null;
                var backward = layerInfo.Backward != null ? _GetRecurrentExecution(layerInfo.Backward) : null;
                executionPairs.Add(Tuple.Create(forward, backward));
            }

            return new BidirectionalExecution(
                _lap,
                executionPairs,
                _lap.Create(network.ForwardMemory.Data),
                _lap.Create(network.BackwardMemory.Data),
                network.Padding
            );
        }
        /// <summary>
        /// Evaluates the network against the test data and, if the score beats the
        /// current best (direction depends on whether the error metric is maximised
        /// or minimised), records the network and its memory buffers as the new best.
        /// </summary>
        /// <param name="context">Training context used to report the score</param>
        /// <param name="forwardMemory">Forward initial memory buffer</param>
        /// <param name="backwardMemory">Backward initial memory buffer</param>
        /// <param name="data">Test data provider</param>
        /// <param name="network">The trainer whose current network is being scored</param>
        /// <param name="recurrentContext">Recurrent training context (supplies the error metric)</param>
        /// <param name="bestScore">Best score so far; updated on improvement</param>
        /// <param name="output">Best network so far; updated on improvement</param>
        /// <returns>True if this score improved on the previous best</returns>
        protected bool _CalculateTestScore(ITrainingContext context, float[] forwardMemory, float[] backwardMemory, ISequentialTrainingDataProvider data, INeuralNetworkBidirectionalBatchTrainer network, IRecurrentTrainingContext recurrentContext, ref double bestScore, ref BidirectionalNetwork output)
        {
            var score       = _GetScore(data, network, forwardMemory, backwardMemory, recurrentContext);
            var errorMetric = recurrentContext.TrainingContext.ErrorMetric;

            // "Better" depends on the metric's direction (e.g. accuracy vs. error)
            var improved = errorMetric.HigherIsBetter
                ? score > bestScore
                : score < bestScore;

            if (improved) {
                bestScore = score;
                output    = network.NetworkInfo;
                output.ForwardMemory  = new FloatArray { Data = forwardMemory };
                output.BackwardMemory = new FloatArray { Data = backwardMemory };
            }

            context.WriteScore(score, errorMetric.DisplayAsPercentage, improved);
            return improved;
        }