/// <summary>
/// Creates a sampled-softmax output layer from the given configuration.
/// </summary>
/// <param name="config">Layer configuration; also forwarded to the base layer constructor.</param>
/// <exception cref="ArgumentException">
/// Thrown when the configured negative sample size exceeds the layer size.
/// </exception>
public SampledSoftmaxLayer(SampledSoftmaxLayerConfig config) : base(config)
{
    this.config = config;

    // NOTE(review): presumably negative samples are drawn from this layer's own
    // units, hence the sample count may not exceed the unit count — confirm.
    if (negativeSampleSize > LayerSize)
    {
        throw new ArgumentException(
            $"The size of negative sampling('{negativeSampleSize}') cannot be greater than the hidden layer size('{LayerSize}').");
    }
}
/// <summary>
/// Parses the OUTPUT_LAYER configuration entry (format: "TypeName" or
/// "SampledSoftmax:negativeSampleSize") and initializes <c>OutputLayerConfig</c>
/// with the matching layer configuration object.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when CRF training is enabled with a non-simple output layer, when a
/// SampledSoftmax entry is missing its sample-size parameter, or when the layer
/// type name is not recognized.
/// </exception>
private void SetOutputLayers()
{
    //Settings for output layer
    var outputLayer = config.GetValueRequired(OUTPUT_LAYER);
    var items = outputLayer.Split(':');
    var sLayerType = items[0];

    // Match the type name case-insensitively against the LayerType enum;
    // fall back to LayerType.None when nothing matches.
    // (Enum.TryParse is deliberately not used: it would also accept raw
    // numeric strings, which this config format does not allow.)
    var outputLayerType = LayerType.None;
    foreach (var type in Enum.GetValues(typeof(LayerType)).Cast<LayerType>())
    {
        if (sLayerType.Equals(type.ToString(), StringComparison.InvariantCultureIgnoreCase))
        {
            outputLayerType = type;
            break;
        }
    }

    // CRF decoding operates on raw scores, so it requires a simple output layer.
    if (IsCRFTraining && outputLayerType != LayerType.Simple)
    {
        throw new ArgumentException("For RNN-CRF model, its output layer type must be simple layer.");
    }

    switch (outputLayerType)
    {
        case LayerType.Softmax:
            OutputLayerConfig = new SoftmaxLayerConfig();
            Logger.WriteLine("Initialize configuration for softmax layer.");
            break;

        case LayerType.SampledSoftmax:
            // The sample size is mandatory for this layer type; fail with a
            // clear message instead of an IndexOutOfRangeException.
            if (items.Length < 2)
            {
                throw new ArgumentException(
                    $"Sampled softmax output layer requires a negative sample size, e.g. 'SampledSoftmax:20'. Got '{outputLayer}'.");
            }
            var sampledSoftmaxLayerConfig = new SampledSoftmaxLayerConfig
            {
                NegativeSampleSize = int.Parse(items[1])
            };
            OutputLayerConfig = sampledSoftmaxLayerConfig;
            Logger.WriteLine(
                $"Initialize configuration for sampled Softmax layer. Negative sample size = '{sampledSoftmaxLayerConfig.NegativeSampleSize}'");
            break;

        case LayerType.Simple:
            OutputLayerConfig = new SimpleLayerConfig();
            Logger.WriteLine("Initialize configuration for simple layer.");
            break;

        default:
            Logger.WriteLine($"Invalid output layer type {sLayerType}");
            throw new ArgumentException($"Invalid output layer type {sLayerType}");
    }
}