Example #1
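        /// <summary>
        /// Generates a neural network model for predicting sequences.
        /// </summary>
        /// <param name="X">Matrix of training data.</param>
        /// <param name="Y">Matrix of matching sequence labels.</param>
        /// <returns>NeuralNetworkModel.</returns>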
        public virtual ISequenceModel Generate(Matrix X, Matrix Y)
        {
            this.Preprocess(X);
            // default to 500 iterations when the caller leaves MaxIterations unset (-1)
            if (MaxIterations == -1)
            {
                MaxIterations = 500;
            }

            var network = Network.New().Create(X.Cols, Y.Cols, Activation, OutputFunction, epsilon: Epsilon);

            INetworkTrainer trainer = new GradientDescentTrainer();

            var model = new NeuralNetworkModel
            {
                Descriptor        = Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Network           = network
            };

            OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

            NetworkTrainingProperties properties = NetworkTrainingProperties.Create(network, X.Rows, X.Cols, this.LearningRate, this.Lambda, this.MaxIterations);

            Vector loss = Vector.Zeros(this.MaxIterations);

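            // training loop: one gradient-descent update per row, with an early exit once the loss converges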
            for (int i = 0; i < MaxIterations; i++)
            {
                properties.Iteration = i;

                network.ResetStates(properties);

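                // forward/backward pass for each training row; per-row cost accumulates into loss[i]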
                for (int x = 0; x < X.Rows; x++)
                {
                    network.Forward(X[x, VectorType.Row]);
                    //OnModelChanged(this, ModelEventArgs.Make(model, "Forward"));
                    network.Back(Y[x, VectorType.Row], properties, trainer);

                    loss[i] += network.Cost;
                }

                var output = String.Format("Run ({0}/{1}): {2}", i, MaxIterations, network.Cost);
                OnModelChanged(this, ModelEventArgs.Make(model, output));

                if (this.LossMinimized(loss, i))
                {
                    break;
                }
            }

            return model;
        }
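
A minimal caller for the listing above might look like the sketch below. The learner class name (NeuralNetworkLearner) and the property values are assumptions for illustration; only Generate, LearningRate, MaxIterations, and the Matrix/Vector types actually appear in the listing, and Matrix is assumed to come from numl.Math.LinearAlgebra.

        // Hypothetical usage sketch -- NeuralNetworkLearner is an assumed name, not part of the listing
        Matrix X = Matrix.Zeros(100, 4);           // 100 training rows with 4 features (fill with real data)
        Matrix Y = Matrix.Zeros(100, 2);           // matching 2-column labels
        var learner = new NeuralNetworkLearner
        {
            LearningRate  = 0.1,
            MaxIterations = -1                     // left at -1, so Generate falls back to 500 iterations
        };
        ISequenceModel model = learner.Generate(X, Y);
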
Example #2
        /// <summary>
        /// Generates a GRU neural network model for predicting sequences.
        /// </summary>
        /// <param name="X">Matrix of training data.</param>
        /// <param name="Y">Matrix of matching sequence labels.</param>
        /// <returns>GatedRecurrentModel.</returns>
        public override ISequenceModel Generate(Matrix X, Matrix Y)
        {
            this.Preprocess(X);

            // default to 500 iterations when no explicit cap is set
            if (MaxIterations <= 0)
            {
                MaxIterations = 500;
            }

            Network network = Network.New().Create(X.Cols, Y.Cols, Activation, OutputFunction,
                fnNodeInitializer: (i, j, type) =>
                {
                    // hidden and output nodes carry gated recurrent state; input nodes stay plain
                    if (type == NodeType.Hidden || type == NodeType.Output)
                    {
                        return new RecurrentNeuron()
                        {
                            ActivationFunction = this.Activation,
                            ResetGate          = this.ResetGate,
                            UpdateGate         = this.UpdateGate
                        };
                    }
                    else
                    {
                        return new Neuron();
                    }
                },
                epsilon: Epsilon, lossFunction: new CrossEntropyLoss());

            var model = new GatedRecurrentModel
            {
                Descriptor        = Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Network           = network,
                OutputFunction    = this.OutputFunction
            };

            int m = X.Rows;

            OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

            NetworkTrainingProperties properties = NetworkTrainingProperties.Create(network, X.Rows, X.Cols, this.LearningRate, this.Lambda, this.MaxIterations,
                                                                                    new { this.SequenceLength });

            INetworkTrainer trainer = new GradientDescentTrainer();

            Vector loss = Vector.Zeros(MaxIterations);

            Matrix Yt = Matrix.Zeros(Y.Rows, Y.Cols);

            var tuples = X.GetRows().Select((s, si) => new Tuple<Vector, Vector>(s, Y[si]));

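            // each pass is one epoch; rows are consumed sequentially in contiguous chunks of SequenceLength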
            for (int pass = 0; pass < MaxIterations; pass++)
            {
                properties.Iteration = pass;

                tuples.Batch(SequenceLength, (idx, items) =>
                {
                    network.ResetStates(properties);

                    // forward pass over the chunk, advancing recurrent state one time step per row
                    for (int i = 0; i < items.Count(); i++)
                    {
                        properties[RecurrentNeuron.TimeStepLabel] = i;
                        network.Forward(items.ElementAt(i).Item1);

                        foreach (RecurrentNeuron node in network.GetVertices().OfType<RecurrentNeuron>())
                        {
                            if (node.IsHidden || node.IsOutput)
                            {
                                node.State(properties);
                            }
                        }

                        Yt[idx + i] = network.Output();
                    }

                    // backpropagation through time: walk the same chunk in reverse, accumulating cost
                    for (int i = items.Count() - 1; i >= 0; i--)
                    {
                        properties[RecurrentNeuron.TimeStepLabel] = i;
                        network.Back(items.ElementAt(i).Item2, properties, trainer);

                        loss[pass] += network.Cost;
                    }
                }, asParallel: false);

                var output = String.Format("Run ({0}/{1}): {2}", pass, MaxIterations, network.Cost);
                OnModelChanged(this, ModelEventArgs.Make(model, output));

                if (this.LossMinimized(loss, pass))
                {
                    break;
                }
            }

            return model;
        }
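
A similarly hedged sketch for the GRU variant follows. The generator class name (GatedRecurrentGenerator) is an assumption; SequenceLength, LearningRate, and MaxIterations are the properties the listing reads from `this`, and the numeric values are illustrative only.

        // Hypothetical usage sketch for the GRU listing -- the generator type name is an assumption
        Matrix X = Matrix.Zeros(200, 8);           // 200 time-ordered rows of 8 features (fill with real data)
        Matrix Y = Matrix.Zeros(200, 8);           // e.g. the same series shifted one step ahead
        var generator = new GatedRecurrentGenerator
        {
            SequenceLength = 25,                   // rows are trained in contiguous chunks of 25
            LearningRate   = 0.05,
            MaxIterations  = 200
        };
        ISequenceModel model = generator.Generate(X, Y);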