示例#1
0
        public override ISequenceModel Generate(Matrix X, Matrix Y)
        {
            // Dense autoencoders learn an approximation of the identity function,
            // so the label matrix Y is intentionally ignored; the output layer is
            // sized to the number of columns in X.
            this.Preprocess(X);

            // Default the hidden layer to 2/3 of the input width when unset.
            if (this.Density <= 0)
            {
                this.Density = (int)System.Math.Ceiling(X.Cols * (2.0 / 3.0));
            }

            if (this.MaxIterations <= 0)
            {
                this.MaxIterations = 400;                     // because Seth said so...
            }

            var network = Network.New().Create(X.Cols, X.Cols, this.Activation, this.OutputFunction,
                                               (node, layer) => new AutoencoderNeuron(),
                                               epsilon: this.Epsilon, hiddenLayers: new int[] { this.Density });

            var model = new AutoencoderModel
            {
                Descriptor        = Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Network           = network,
                OutputFunction    = this.OutputFunction
            };

            OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

            var trainingProperties = NetworkTrainingProperties.Create(network, X.Rows, X.Cols, this.LearningRate,
                                                                      this.Lambda, this.MaxIterations,
                                                                      new { this.Density, this.Sparsity, this.SparsityWeight });

            for (int epoch = 0; epoch < this.MaxIterations; epoch++)
            {
                trainingProperties.Iteration = epoch;

                // One forward/backward pass per training row, reconstructing the input itself.
                for (int row = 0; row < X.Rows; row++)
                {
                    network.Forward(X[row, VectorType.Row]);
                    network.Back(X[row, VectorType.Row], trainingProperties);
                }

                OnModelChanged(this, ModelEventArgs.Make(model,
                    String.Format("Run ({0}/{1}): {2}", epoch, MaxIterations, network.Cost)));
            }

            return model;
        }
示例#2
0
        /// <summary>
        /// Generates a GRU neural network model for predicting sequences.
        /// </summary>
        /// <param name="X">Matrix of training data.</param>
        /// <param name="Y">Matrix of matching sequence labels.</param>
        /// <returns>GatedRecurrentModel.</returns>
        public override ISequenceModel Generate(Matrix X, Matrix Y)
        {
            this.Preprocess(X);

            // because Seth said so...
            if (MaxIterations <= 0)
            {
                MaxIterations = 500;
            }

            Network network = Network.New().Create(X.Cols, Y.Cols, Activation, OutputFunction,
                                                   fnNodeInitializer: (i, j, type) =>
            {
                // Hidden and output nodes carry recurrent (GRU) state; input nodes
                // are plain neurons.
                // FIX: the return statement's terminating ';' was previously placed
                // after the if-block's closing brace, orphaning the 'else' and
                // breaking compilation.
                if (type == NodeType.Hidden || type == NodeType.Output)
                {
                    return new RecurrentNeuron()
                    {
                        ActivationFunction = this.Activation,
                        ResetGate          = this.ResetGate,
                        UpdateGate         = this.UpdateGate
                    };
                }
                else
                {
                    return new Neuron();
                }
            },
                                                   epsilon: Epsilon, lossFunction: new CrossEntropyLoss());

            var model = new GatedRecurrentModel
            {
                Descriptor        = Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Network           = network,
                OutputFunction    = this.OutputFunction
            };

            OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

            NetworkTrainingProperties properties = NetworkTrainingProperties.Create(network, X.Rows, X.Cols, this.LearningRate, this.Lambda, this.MaxIterations,
                                                                                    new { this.SequenceLength });

            INetworkTrainer trainer = new GradientDescentTrainer();

            // Accumulated cost per pass, consulted for early stopping below.
            Vector loss = Vector.Zeros(MaxIterations);

            // Network outputs per training row (written each pass; kept for inspection).
            Matrix Yt = Matrix.Zeros(Y.Rows, Y.Cols);

            // Pair each training row with its matching label row.
            var tuples = X.GetRows().Select((s, si) => new Tuple<Vector, Vector>(s, Y[si]));

            for (int pass = 0; pass < MaxIterations; pass++)
            {
                properties.Iteration = pass;

                tuples.Batch(SequenceLength, (idx, items) =>
                {
                    // Clear recurrent state between sequences.
                    network.ResetStates(properties);

                    int count = items.Count();

                    // Forward pass through the sequence, advancing the time step.
                    for (int i = 0; i < count; i++)
                    {
                        properties[RecurrentNeuron.TimeStepLabel] = i;
                        network.Forward(items.ElementAt(i).Item1);

                        foreach (RecurrentNeuron node in network.GetVertices().OfType<RecurrentNeuron>())
                        {
                            if (node.IsHidden || node.IsOutput)
                            {
                                node.State(properties);
                            }
                        }

                        Yt[idx + i] = network.Output();
                    }

                    // Backward pass (backpropagation through time) in reverse order.
                    for (int i = count - 1; i >= 0; i--)
                    {
                        properties[RecurrentNeuron.TimeStepLabel] = i;
                        network.Back(items.ElementAt(i).Item2, properties, trainer);

                        loss[pass] += network.Cost;
                    }
                }, asParallel: false);

                OnModelChanged(this, ModelEventArgs.Make(model, String.Format("Run ({0}/{1}): {2}", pass, MaxIterations, network.Cost)));

                // Stop early once the loss curve has flattened.
                if (this.LossMinimized(loss, pass))
                {
                    break;
                }
            }

            return model;
        }
示例#3
0
        public override ISequenceModel Generate(Matrix X, Matrix Y)
        {
            // Autoencoders learn an approximation of the identity function, so the
            // labels mirror the inputs; the output layer width follows Y.
            this.Preprocess(X);

            // Default the hidden layer to 2/3 of the input width when unset.
            if (this.Density <= 0)
            {
                this.Density = (int)System.Math.Ceiling(X.Cols * (2.0 / 3.0));
            }

            if (this.MaxIterations <= 0)
            {
                this.MaxIterations = 400;
            }

            var identity = new Ident();

            // Output nodes use the identity activation; all others keep the default.
            var network = Network.New().Create(X.Cols, Y.Cols, this.Activation, this.OutputFunction,
                                               (i, j, type) => new AutoencoderNeuron {
                ActivationFunction = type == NodeType.Output ? identity : null
            },
                                               epsilon: this.Epsilon, hiddenLayers: new int[] { this.Density });

            INetworkTrainer trainer = new RMSPropTrainer(); // because Geoffrey Hinton :) ...

            var model = new AutoencoderModel
            {
                Descriptor        = Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Network           = network
            };

            OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

            var trainingProperties = NetworkTrainingProperties.Create(network, X.Rows, Y.Cols, this.LearningRate,
                                                                      this.Lambda, this.MaxIterations,
                                                                      new { this.Density, this.Sparsity, this.SparsityWeight });

            // Accumulated cost per epoch, consulted for early stopping below.
            var loss = Vector.Zeros(this.MaxIterations);

            for (int epoch = 0; epoch < this.MaxIterations; epoch++)
            {
                trainingProperties.Iteration = epoch;

                network.ResetStates(trainingProperties);

                for (int row = 0; row < X.Rows; row++)
                {
                    network.Forward(X[row, VectorType.Row]);
                    network.Back(Y[row, VectorType.Row], trainingProperties, trainer);

                    loss[epoch] += network.Cost;
                }

                OnModelChanged(this, ModelEventArgs.Make(model,
                    String.Format("Run ({0}/{1}): {2}", epoch, MaxIterations, network.Cost)));

                // Stop early once the loss curve has flattened.
                if (this.LossMinimized(loss, epoch))
                {
                    break;
                }
            }

            return model;
        }
示例#4
0
        /// <summary>
        /// Generates a GRU neural network model for predicting sequences.
        /// </summary>
        /// <param name="X">Matrix of training data.</param>
        /// <param name="Y">Matrix of matching sequence labels.</param>
        /// <returns>GatedRecurrentModel.</returns>
        public ISequenceModel Generate(Matrix X, Matrix Y)
        {
            this.Preprocess(X);

            // because Seth said so...
            if (MaxIterations <= 0)
            {
                MaxIterations = 500;
            }

            Network network = Network.New().Create(X.Cols, Y.Cols, Activation, OutputFunction,
                                                   fnNodeInitializer: (i, j) => new RecurrentNeuron()
            {
                ActivationFunction = this.Activation,
                ResetGate          = this.ResetGate,
                MemoryGate         = this.UpdateGate,

                DeltaH = Vector.Zeros(this.SequenceLength)
            }, epsilon: Epsilon);

            var model = new GatedRecurrentModel
            {
                Descriptor        = Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Network           = network,
                OutputFunction    = this.OutputFunction
            };

            OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

            NetworkTrainingProperties properties = NetworkTrainingProperties.Create(network, X.Rows, X.Cols, this.LearningRate, this.Lambda, this.MaxIterations,
                                                                                    new { this.SequenceLength });

            // Final cost per pass, recorded for progress reporting.
            Vector loss = Vector.Zeros(MaxIterations);

            // Pair each training row with its matching label row.
            var tuples = X.GetRows().Select((s, si) => new Tuple<Vector, Vector>(s, Y[si]));

            for (int pass = 0; pass < MaxIterations; pass++)
            {
                properties.Iteration = pass;

                tuples.Batch(SequenceLength, (idx, items) =>
                {
                    // Clear recurrent state between sequences.
                    network.ResetStates(properties);

                    // FIX: the loop previously tested and incremented 'idx' (the batch
                    // offset) while indexing with 'i', so it always processed element 0
                    // and terminated incorrectly. Iterate the batch items with 'i'.
                    int count = items.Count();
                    for (int i = 0; i < count; i++)
                    {
                        network.Forward(items.ElementAt(i).Item1);
                        network.Back(items.ElementAt(i).Item2, properties);
                    }
                }, asParallel: false);

                loss[pass] = network.Cost;

                OnModelChanged(this, ModelEventArgs.Make(model, String.Format("Run ({0}/{1}): {2}", pass, MaxIterations, network.Cost)));
            }

            return model;
        }