/// <summary>
/// Creates the weight updater chain for an existing layer: a primary updater
/// built from the descriptor's regularisation and lambda, optionally wrapped
/// by the descriptor's weight update strategy.
/// </summary>
/// <param name="layer">The layer whose weights will be updated</param>
/// <param name="descriptor">Describes the regularisation and weight update scheme</param>
/// <returns>The configured layer updater</returns>
public INeuralNetworkLayerUpdater CreateUpdater(INeuralNetworkLayer layer, LayerDescriptor descriptor)
{
    var primary = _CreatePrimaryUpdater(layer, descriptor.Regularisation, descriptor.Lambda);
    var updateType = descriptor.WeightUpdate;

    // Wrap the primary updater in the requested adaptive update scheme
    if (updateType == WeightUpdateType.Adagrad)
        return _weightUpdater.Adagrad(primary);
    if (updateType == WeightUpdateType.Momentum)
        return _weightUpdater.Momentum(primary, descriptor.Momentum);
    if (updateType == WeightUpdateType.NesterovMomentum)
        return _weightUpdater.NesterovMomentum(primary, descriptor.Momentum);
    if (updateType == WeightUpdateType.RMSprop)
        return _weightUpdater.RMSprop(primary, descriptor.DecayRate);
    if (updateType == WeightUpdateType.Adam)
        return _weightUpdater.Adam(primary, descriptor.DecayRate, descriptor.DecayRate2);

    // Simple (unwrapped) weight update
    return primary;
}
/// <summary>
/// Clones the current descriptor
/// </summary>
/// <returns>A new descriptor carrying the same attribute values</returns>
public LayerDescriptor Clone()
{
    var copy = new LayerDescriptor();
    CopyTo(copy);
    return copy;
}
/// <summary>
/// Creates a batch trainer from a chain of layer sizes: one layer trainer is
/// built for each consecutive (input, output) pair in <paramref name="layerSizes"/>.
/// </summary>
/// <param name="descriptor">Descriptor applied to every created layer</param>
/// <param name="layerSizes">The sizes of the successive layers</param>
public INeuralNetworkTrainer CreateBatchTrainer(LayerDescriptor descriptor, params int[] layerSizes)
{
    // Pair up adjacent sizes: (layerSizes[i], layerSizes[i+1]) => one trainer each
    var layerTrainers = Enumerable.Range(0, layerSizes.Length - 1)
        .Select(i => CreateTrainer(layerSizes[i], layerSizes[i + 1], descriptor))
        .ToList()
    ;
    return CreateBatchTrainer(layerTrainers);
}
/// <summary>
/// Copies attributes from this layer to the target layer
/// </summary>
/// <param name="layer">The target layer</param>
public void CopyTo(LayerDescriptor layer)
{
    // activation / initialisation
    layer.Activation = Activation;
    layer.WeightInitialisation = WeightInitialisation;

    // training configuration
    layer.LayerTrainer = LayerTrainer;
    layer.Dropout = Dropout;

    // regularisation
    layer.Regularisation = Regularisation;
    layer.Lambda = Lambda;

    // weight update configuration
    layer.WeightUpdate = WeightUpdate;
    layer.Momentum = Momentum;
    layer.DecayRate = DecayRate;
    layer.DecayRate2 = DecayRate2;
}
// Rebuilds a feed forward layer from its serialised form: restores the bias
// vector and weight matrix, then resolves the activation function from the
// layer's descriptor.
StandardFeedForward _ReadFeedForward(NetworkLayer layer)
{
    var descriptor = LayerDescriptor.CreateFrom(layer);

    // restore the bias vector from the serialised data
    var bias = _lap.Create(layer.OutputSize, 0f);
    bias.Data = layer.Bias;

    // restore the InputSize x OutputSize weight matrix
    var weight = _lap.Create(layer.InputSize, layer.OutputSize, 0f);
    weight.Data = layer.Weight;

    var activation = _activation[descriptor.Activation];
    return new StandardFeedForward(weight, bias, activation);
}
/// <summary>
/// Creates a layer trainer for an existing layer: builds the weight updater
/// and wraps it in the trainer specified by the descriptor.
/// </summary>
/// <param name="layer">The layer to train</param>
/// <param name="descriptor">Describes how the layer should be trained</param>
public INeuralNetworkLayerTrainer CreateTrainer(INeuralNetworkLayer layer, LayerDescriptor descriptor)
{
    var updater = CreateUpdater(layer, descriptor);
    return _CreateLayerUpdater(updater, descriptor);
}
/// <summary>
/// Creates a layer trainer for a new layer of the given dimensions: builds
/// the weight updater and wraps it in the trainer specified by the descriptor.
/// </summary>
/// <param name="inputSize">The layer's input size</param>
/// <param name="outputSize">The layer's output size</param>
/// <param name="descriptor">Describes how the layer should be trained</param>
public INeuralNetworkLayerTrainer CreateTrainer(int inputSize, int outputSize, LayerDescriptor descriptor)
{
    var updater = CreateUpdater(inputSize, outputSize, descriptor);
    return _CreateLayerUpdater(updater, descriptor);
}
// Wraps a layer updater in the trainer requested by the descriptor:
// drop connect, dropout, or the standard trainer by default.
INeuralNetworkLayerTrainer _CreateLayerUpdater(INeuralNetworkLayerUpdater layerUpdater, LayerDescriptor init)
{
    if (init.LayerTrainer == LayerTrainerType.DropConnect)
        return _trainer.DropConnect(layerUpdater, init.Dropout);
    if (init.LayerTrainer == LayerTrainerType.Dropout)
        return _trainer.Dropout(layerUpdater, init.Dropout);
    return _trainer.Standard(layerUpdater);
}
/// <summary>
/// Creates a feed forward recurrent layer backed by a trainer for the
/// given input and output sizes.
/// </summary>
/// <param name="inputSize">The layer's input size</param>
/// <param name="outputSize">The layer's output size</param>
/// <param name="descriptor">Describes how the layer should be trained</param>
public INeuralNetworkRecurrentLayer CreateFeedForwardRecurrentLayer(int inputSize, int outputSize, LayerDescriptor descriptor)
{
    var trainer = CreateTrainer(inputSize, outputSize, descriptor);
    return new Training.Layer.Recurrent.FeedForward(trainer);
}
/// <summary>
/// Creates an LSTM recurrent layer of the given dimensions, passing this
/// factory and the descriptor through to the layer's constructor.
/// </summary>
/// <param name="inputSize">The layer's input size</param>
/// <param name="outputSize">The layer's output size</param>
/// <param name="descriptor">Describes how the layer should be trained</param>
public INeuralNetworkRecurrentLayer CreateLstmRecurrentLayer(int inputSize, int outputSize, LayerDescriptor descriptor)
{
    return new Training.Layer.Recurrent.Lstm(inputSize, outputSize, this, descriptor);
}
/// <summary>
/// Creates a layer whose weights are tied to an existing layer, using the
/// weight initialisation named in the descriptor.
/// </summary>
/// <param name="layer">The layer to tie the new layer's weights to</param>
/// <param name="descriptor">Supplies the weight initialisation</param>
public INeuralNetworkLayer CreateTiedLayer(INeuralNetworkLayer layer, LayerDescriptor descriptor)
{
    var weightInit = _weightInitialisation[descriptor.WeightInitialisation];
    return new TiedLayer(_lap, layer, weightInit);
}
/// <summary>
/// Creates a standard layer of the given dimensions, resolving the activation
/// function and weight initialisation named in the descriptor.
/// </summary>
/// <param name="inputSize">The layer's input size</param>
/// <param name="outputSize">The layer's output size</param>
/// <param name="descriptor">Describes the layer's configuration</param>
public INeuralNetworkLayer CreateLayer(int inputSize, int outputSize, LayerDescriptor descriptor)
{
    var activation = _activation[descriptor.Activation];
    var weightInit = _weightInitialisation[descriptor.WeightInitialisation];
    return new Standard(_lap, inputSize, outputSize, descriptor, activation, weightInit);
}