/// <summary>
/// Generates a GRU neural network model for predicting sequences.
/// </summary>
/// <param name="X">Matrix of training data.</param>
/// <param name="Y">Matrix of matching sequence labels.</param>
/// <returns>GatedRecurrentModel.</returns>
public ISequenceModel Generate(Matrix X, Matrix Y)
{
    this.Preprocess(X);

    // because Seth said so...
    if (MaxIterations <= 0) { MaxIterations = 500; }

    // every node is a recurrent neuron carrying its own reset/update gates
    // and a delta history sized to the sequence length
    Network network = Network.New().Create(X.Cols, Y.Cols, Activation, OutputFunction,
        fnNodeInitializer: (i, j) => new RecurrentNeuron()
        {
            ActivationFunction = this.Activation,
            ResetGate = this.ResetGate,
            MemoryGate = this.UpdateGate,
            DeltaH = Vector.Zeros(this.SequenceLength)
        }, epsilon: Epsilon);

    var model = new GatedRecurrentModel
    {
        Descriptor = Descriptor,
        NormalizeFeatures = base.NormalizeFeatures,
        FeatureNormalizer = base.FeatureNormalizer,
        FeatureProperties = base.FeatureProperties,
        Network = network,
        OutputFunction = this.OutputFunction
    };

    int m = X.Rows;

    OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

    NetworkTrainingProperties properties = NetworkTrainingProperties.Create(network, X.Rows, X.Cols,
        this.LearningRate, this.Lambda, this.MaxIterations, new { this.SequenceLength });

    Vector loss = Vector.Zeros(MaxIterations);

    // materialize once: the deferred query would otherwise be re-evaluated
    // on every training pass
    var tuples = X.GetRows()
                  .Select((s, si) => new Tuple<Vector, Vector>(s, Y[si]))
                  .ToList();

    for (int pass = 0; pass < MaxIterations; pass++)
    {
        properties.Iteration = pass;

        tuples.Batch(SequenceLength, (idx, items) =>
        {
            // recurrent state is per-sequence, so clear it between batches
            network.ResetStates(properties);

            // BUG FIX: the original loop tested and incremented the batch
            // index (idx) while the element index i stayed at 0, so only
            // the first item of each batch was ever trained on. Iterate i
            // over the batch items instead (materialized to avoid repeated
            // O(n) ElementAt calls).
            var sequence = items.ToList();
            for (int i = 0; i < sequence.Count; i++)
            {
                network.Forward(sequence[i].Item1);
                network.Back(sequence[i].Item2, properties);
            }
        }, asParallel: false);

        loss[pass] = network.Cost;

        var output = String.Format("Run ({0}/{1}): {2}", pass, MaxIterations, network.Cost);
        OnModelChanged(this, ModelEventArgs.Make(model, output));
    }

    return model;
}
/// <summary>
/// Generates an autoencoder model by training the network to reconstruct its input.
/// </summary>
/// <param name="X">Matrix of training data.</param>
/// <param name="Y">Label matrix; ignored — an autoencoder's target is X itself.</param>
/// <returns>AutoencoderModel.</returns>
public override ISequenceModel Generate(Matrix X, Matrix Y)
{
    // autoencoders learn the approximation identity function so ignore labels.
    // the output layer is the number of columns in X
    this.Preprocess(X);

    // default hidden layer to 2/3 of the input
    if (this.Density <= 0) { this.Density = (int)System.Math.Ceiling(X.Cols * (2.0 / 3.0)); }

    if (this.MaxIterations <= 0) { MaxIterations = 400; }

    // output neurons get a linear (identity) activation so the network can
    // reproduce the (normalized) input values directly
    var identity = new Ident();

    // BUG FIX: the output layer width and training target previously came
    // from Y (Y.Cols / Y[x]), contradicting the contract above — an
    // autoencoder reconstructs X, so labels are ignored.
    Network network = Network.New().Create(X.Cols, X.Cols, this.Activation, this.OutputFunction,
        (i, j, type) => new AutoencoderNeuron
        {
            ActivationFunction = (type == NodeType.Output ? identity : null)
        },
        epsilon: this.Epsilon, hiddenLayers: new int[] { this.Density });

    INetworkTrainer trainer = new RMSPropTrainer(); // because Geoffrey Hinton :) ...

    var model = new AutoencoderModel
    {
        Descriptor = Descriptor,
        NormalizeFeatures = base.NormalizeFeatures,
        FeatureNormalizer = base.FeatureNormalizer,
        FeatureProperties = base.FeatureProperties,
        Network = network
    };

    OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

    NetworkTrainingProperties properties = NetworkTrainingProperties.Create(network, X.Rows, X.Cols,
        this.LearningRate, this.Lambda, this.MaxIterations,
        new { this.Density, this.Sparsity, this.SparsityWeight });

    Vector loss = Vector.Zeros(this.MaxIterations);

    for (int i = 0; i < this.MaxIterations; i++)
    {
        properties.Iteration = i;
        network.ResetStates(properties);

        for (int x = 0; x < X.Rows; x++)
        {
            network.Forward(X[x, VectorType.Row]);
            //OnModelChanged(this, ModelEventArgs.Make(model, "Forward"));
            // train against the input row itself (identity target)
            network.Back(X[x, VectorType.Row], properties, trainer);
            loss[i] += network.Cost;
        }

        var result = String.Format("Run ({0}/{1}): {2}", i, MaxIterations, network.Cost);
        OnModelChanged(this, ModelEventArgs.Make(model, result));

        // stop early once the accumulated loss curve has flattened
        if (this.LossMinimized(loss, i)) { break; }
    }

    return model;
}
/// <summary>
/// Trains a dense autoencoder. Labels are ignored: the network learns the
/// identity function, reconstructing each input row at an output layer as
/// wide as the input.
/// </summary>
/// <param name="X">Matrix of training data (also the training target).</param>
/// <param name="Y">Label matrix; unused.</param>
/// <returns>AutoencoderModel.</returns>
public override ISequenceModel Generate(Matrix X, Matrix Y)
{
    Preprocess(X);

    // hidden layer defaults to roughly two thirds of the input width
    if (Density <= 0)
        Density = (int)System.Math.Ceiling(X.Cols * (2.0 / 3.0));

    // because Seth said so...
    if (MaxIterations <= 0)
        MaxIterations = 400;

    var net = Network.New().Create(
        X.Cols,
        X.Cols,
        Activation,
        OutputFunction,
        (i, j) => new AutoencoderNeuron(),
        epsilon: Epsilon,
        hiddenLayers: new[] { Density });

    var model = new AutoencoderModel
    {
        Descriptor = Descriptor,
        NormalizeFeatures = NormalizeFeatures,
        FeatureNormalizer = FeatureNormalizer,
        FeatureProperties = FeatureProperties,
        Network = net,
        OutputFunction = OutputFunction
    };

    OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

    var trainingProps = NetworkTrainingProperties.Create(
        net,
        X.Rows,
        X.Cols,
        LearningRate,
        Lambda,
        MaxIterations,
        new { Density, Sparsity, SparsityWeight });

    for (var epoch = 0; epoch < MaxIterations; epoch++)
    {
        trainingProps.Iteration = epoch;

        // one pass over every row; the target of Back() is the row itself
        for (var row = 0; row < X.Rows; row++)
        {
            net.Forward(X[row, VectorType.Row]);
            //OnModelChanged(this, ModelEventArgs.Make(model, "Forward"));
            net.Back(X[row, VectorType.Row], trainingProps);
        }

        var progress = string.Format("Run ({0}/{1}): {2}", epoch, MaxIterations, net.Cost);
        OnModelChanged(this, ModelEventArgs.Make(model, progress));
    }

    return model;
}