/// <summary>
/// Builds a square kernel of the given spatial size and depth. Allocates the
/// weight and gradient tensors plus one learning-rate annealer per weight and
/// a separate annealer for the bias.
/// </summary>
/// <param name="size">Spatial width/height of the kernel.</param>
/// <param name="depth">Number of input channels the kernel spans.</param>
/// <param name="type">Which learning-rate annealing algorithm to attach.</param>
public Kernel(int size, int depth, LearningRateAnnealerType type)
{
    Weights = new double[depth, size, size];
    Gradient = new double[depth, size, size];

    // One independent annealer per weight so each adapts its own step size.
    _learningRateAnnealers = new ILearningRateAnnealer[depth, size, size];
    for (int d = 0; d < depth; d++)
    {
        for (int row = 0; row < size; row++)
        {
            for (int col = 0; col < size; col++)
            {
                _learningRateAnnealers[d, row, col] = LearningRateAnnealerFactory.Produce(type);
            }
        }
    }

    _biasLearningRateAnnealer = LearningRateAnnealerFactory.Produce(type);
}
/// <summary>
/// Factory method: creates a fresh learning-rate annealer for the requested type.
/// </summary>
/// <param name="type">The annealing algorithm to instantiate.</param>
/// <returns>A newly constructed annealer instance.</returns>
/// <exception cref="NotSupportedException">
/// Thrown when <paramref name="type"/> has no known mapping.
/// </exception>
public static ILearningRateAnnealer Produce(LearningRateAnnealerType type)
{
    switch (type)
    {
        case LearningRateAnnealerType.Adagrad:
            // 0.1 is the base learning rate.
            return new Adagrad(0.1);
        case LearningRateAnnealerType.RMSprop:
            // 0.1 base learning rate, 0.99 squared-gradient decay factor.
            return new RMSprop(0.1, 0.99);
        default:
            // Fix: was a bare `new Exception(...)`; NotSupportedException is the
            // specific standard type and remains catchable by existing
            // catch (Exception) handlers. Message now includes the offending value.
            throw new NotSupportedException($"LearningRateAnnealerType '{type}' is not supported");
    }
}
/// <summary>
/// Builds a neuron with one weight (and one learning-rate annealer) per
/// incoming connection, plus a bias annealer. Initial weights are drawn from
/// the supplied initializer scaled by 1/sqrt(fan-in).
/// </summary>
/// <param name="activator">Activation function applied to the weighted sum.</param>
/// <param name="weightInitializer">Source of randomized initial weights.</param>
/// <param name="numberOfConnections">Fan-in: neuron count of the previous layer.</param>
/// <param name="lrat">Learning-rate annealing algorithm to attach.</param>
public Neuron(
    IActivator activator,
    IWeightInitializer weightInitializer,
    int numberOfConnections,
    LearningRateAnnealerType lrat)
{
    _activator = activator;

    // Per-weight annealers plus a dedicated one for the bias term.
    _learningRateAnnealers = new ILearningRateAnnealer[numberOfConnections];
    for (int i = 0; i < numberOfConnections; i++)
    {
        _learningRateAnnealers[i] = LearningRateAnnealerFactory.Produce(lrat);
    }
    _biasLearningRateAnnealer = LearningRateAnnealerFactory.Produce(lrat);

    // Scale initial weights by 1/sqrt(fan-in) to keep early activations bounded.
    double magnitude = 1 / Math.Sqrt(numberOfConnections);
    Weights = new double[numberOfConnections];
    for (int i = 0; i < numberOfConnections; i++)
    {
        Weights[i] = weightInitializer.GenerateRandom(magnitude);
    }
}
/// <summary>
/// Builds a fully connected layer: one neuron per output value, each wired to
/// every neuron of the previous layer.
/// </summary>
/// <param name="activator">Activation function shared by all neurons in the layer.</param>
/// <param name="numberOfNeurons">Output size of this layer.</param>
/// <param name="numberOfNeuronsInPreviouseLayer">Fan-in for every neuron.</param>
/// <param name="layerIndex">Position of this layer in the network.</param>
/// <param name="weightInitializer">Source of randomized initial weights.</param>
/// <param name="lrat">Learning-rate annealing algorithm for all neurons.</param>
public FullyConnectedLayer(
    IActivator activator,
    int numberOfNeurons,
    int numberOfNeuronsInPreviouseLayer,
    int layerIndex,
    IWeightInitializer weightInitializer,
    LearningRateAnnealerType lrat)
    : base(layerIndex)
{
    _numberOfNeuronsInPreviouseLayer = numberOfNeuronsInPreviouseLayer;

    Neurons = new List<Neuron>(numberOfNeurons);
    for (int i = 0; i < numberOfNeurons; i++)
    {
        Neurons.Add(new Neuron(activator, weightInitializer, numberOfNeuronsInPreviouseLayer, lrat));
    }
}
/// <summary>
/// Appends a convolutional layer to the network. The layer's input geometry
/// comes from the last filter layer already present, or from the network
/// configuration when this is the first layer.
/// </summary>
/// <param name="numberOfKernels">How many kernels (output channels) the layer has.</param>
/// <param name="kernelSize">Spatial width/height of each kernel.</param>
/// <param name="annealerType">Learning-rate annealing algorithm for the kernels.</param>
public void AddConvolutionalLayer(int numberOfKernels, int kernelSize, LearningRateAnnealerType annealerType)
{
    int layerIndex;
    FilterMeta inputMeta;

    if (_layers.Any())
    {
        // Chain onto the existing stack: next index, geometry from the last filter layer.
        layerIndex = _layers.Last().LayerIndex + 1;
        inputMeta = _layers.OfType<FilterLayer>().Last().GetOutputFilterMeta();
    }
    else
    {
        // First layer in the network: index 1, geometry from the network configuration.
        layerIndex = 1;
        inputMeta = new FilterMeta(_networkConfig.InputDimenision, _networkConfig.InputChannels);
    }

    _layers.Add(new ConvolutionalLayer(
        numberOfKernels,
        kernelSize,
        layerIndex,
        inputMeta,
        new WeightInitializer(),
        annealerType));
}
/// <summary>
/// Appends a fully connected layer to the network. If this is the first dense
/// layer, a flatten layer is inserted beforehand to turn the last filter
/// layer's feature maps into a flat vector.
/// </summary>
/// <param name="numberOfNeurons">Output size of the new layer.</param>
/// <param name="activatorType">Activation function for the layer's neurons.</param>
/// <param name="lrat">Learning-rate annealing algorithm for the layer.</param>
public void AddFullyConnectedLayer(int numberOfNeurons, ActivatorType activatorType, LearningRateAnnealerType lrat)
{
    bool hasDenseLayerAlready = _layers.OfType<FullyConnectedLayer>().Any();
    if (!hasDenseLayerAlready)
    {
        // Bridge from convolutional feature maps to a flat vector.
        var lastFilterLayer = _layers.OfType<FilterLayer>().Last();
        var outputMeta = lastFilterLayer.GetOutputFilterMeta();
        _layers.Add(new FlattenLayer(outputMeta.Channels, outputMeta.Size, lastFilterLayer.LayerIndex + 1));
    }

    // Note: Last() is resolved after the optional flatten insertion above.
    var previous = _layers.Last();
    _layers.Add(new FullyConnectedLayer(
        ActivatorFactory.Produce(activatorType),
        numberOfNeurons,
        previous.GetNumberOfOutputValues(),
        previous.LayerIndex + 1,
        _weightInitializer,
        lrat));
}
/// <summary>
/// Builds a convolutional layer: <paramref name="nk"/> kernels of size
/// <paramref name="ks"/> spanning the input's channels, with randomized
/// initial weights, plus the feature-map buffer sized from the computed
/// output geometry.
/// </summary>
/// <param name="nk">Number of kernels (output channels).</param>
/// <param name="ks">Spatial width/height of each kernel.</param>
/// <param name="li">Index of this layer in the network.</param>
/// <param name="ifm">Geometry of the incoming filter volume.</param>
/// <param name="wi">Source of randomized initial kernel weights.</param>
/// <param name="lrat">Learning-rate annealing algorithm for the kernels.</param>
public ConvolutionalLayer(int nk, int ks, int li, FilterMeta ifm, IWeightInitializer wi, LearningRateAnnealerType lrat)
    : base(li, ifm)
{
    _numberOfKernels = nk;
    _kernelSize = ks;
    _inputeFm = ifm;

    var kernels = new List<Kernel>(nk);
    for (int i = 0; i < nk; i++)
    {
        var kernel = new Kernel(ks, ifm.Channels, lrat);
        kernel.RandomizeWeights(wi);
        kernels.Add(kernel);
    }
    _kernels = new List<Kernel>(kernels);

    // Output geometry depends on the fields assigned above.
    _outputFm = GetOutputFilterMeta();
    _featureMaps = new double[_outputFm.Channels, _outputFm.Size, _outputFm.Size];
}