/// <summary>
/// Creates a mutator over the supplied network factory, weight initializer and
/// mutation settings; a dedicated <see cref="Random"/> instance drives mutation decisions.
/// </summary>
private Mutator(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer, MutationConfigurationSettings config)
{
    _networkFactory = networkFactory;
    _weightInitializer = weightInitializer;
    _config = config;
    _random = new Random();
}
/// <summary>
/// Builds an RBF hidden layer: records the layer dimensions, offset-neuron flag and
/// activation function, then delegates the actual population of Centers, Radiuses,
/// Weights and the offset neuron weight to the supplied hidden-layer initializer
/// (which in turn uses the center/radius/weight initializers).
/// </summary>
/// <param name="hiddenNeuronCount">Number of RBF neurons in the hidden layer.</param>
/// <param name="inputVectorDimension">Dimensionality of the input vectors.</param>
/// <param name="outputVectorDimension">Dimensionality of the output vectors.</param>
/// <param name="centerInitializer">Strategy used to place the RBF centers.</param>
/// <param name="radiusInitializer">Strategy used to choose the RBF radii.</param>
/// <param name="weightInitializer">Strategy used to seed the output weights.</param>
/// <param name="hiddenLayerInitializer">Orchestrates initialization of all layer state.</param>
/// <param name="isOffsetNeuron">Whether the layer carries an extra offset (bias) neuron.</param>
/// <param name="activationFunction">The radial basis activation function.</param>
public RadialBasisHiddenLayer( int hiddenNeuronCount , int inputVectorDimension , int outputVectorDimension , ICenterInitializer centerInitializer , IRadiusInitializer radiusInitializer , IWeightInitializer weightInitializer , IHiddenLayerInitializer hiddenLayerInitializer , bool isOffsetNeuron , IRBFActivationFunction activationFunction) { HiddenNeuronCount = hiddenNeuronCount; IsOffsetNeuron = isOffsetNeuron; InputVectorDimension = inputVectorDimension; OutputVectorDimension = outputVectorDimension; ActivationFunction = activationFunction; hiddenLayerInitializer.Initialize( inputVectorDimension , outputVectorDimension , hiddenNeuronCount , isOffsetNeuron , centerInitializer , radiusInitializer , weightInitializer , ref Centers , ref Radiuses , ref Weights , out OffsetNeuronWeight ); }
/// <summary>
/// Creates a network from its configuration: the cost function is resolved from
/// the configured type, and the instance starts with an empty layer list and a
/// default weight initializer.
/// </summary>
public Network(NetworkConfiguration networkConfig)
{
    _networkConfig = networkConfig;
    _costFunction = CostFunctionFactory.Produce(networkConfig.CostFunctionType);
    _layers = new List<Layer>();
    _weightInitializer = new WeightInitializer();
}
/// <summary>
/// Convenience overload: accepts the activation by name, resolves it through
/// <see cref="Activation.Create(string)"/>, and forwards every other argument
/// unchanged to the <see cref="IActivationFunction"/>-based constructor.
/// </summary>
public Conv2D(int filters, int[] kernel_size = null, int[] strides = null, PaddingType padding = PaddingType.Valid, DataFormatType?data_format = null, int[] dilation_rate = null, string activation = null, bool use_bias = true, IWeightInitializer kernel_initializer = null, IWeightInitializer bias_initializer = null, IWeightRegularizer kernel_regularizer = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null, IWeightConstraint kernel_constraint = null, IWeightConstraint bias_constraint = null, int?[] input_shape = null) : this(filters : filters, kernel_size : kernel_size, strides : strides, padding : padding, data_format : data_format, dilation_rate : dilation_rate, activation : Activation.Create(activation), use_bias : use_bias, kernel_initializer : kernel_initializer, bias_initializer : bias_initializer, kernel_regularizer : kernel_regularizer, bias_regularizer : bias_regularizer, activity_regularizer : activity_regularizer, kernel_constraint : kernel_constraint, bias_constraint : bias_constraint, input_shape : input_shape) { }
/// <summary>
/// Creates a layer with the given identity and size. Each optional component
/// (activation, weight, input function and weight initializer) only replaces
/// the field's default when a non-null value is supplied. When the layer is
/// biased, biases come from the chosen initializer, or from the zero
/// initializer if none was configured.
/// </summary>
private Layer(string id, int length = 1, bool biased = true, IActivationFunction aFunc = null, IWeightFunction wFunc = null, IInputFunction iFunc = null, IWeightInitializer wInit = null)
{
    this.id = id;
    this.length = length;
    this.biased = biased;

    // Keep the field defaults unless the caller provided an override.
    this.aFunc = aFunc ?? this.aFunc;
    this.wFunc = wFunc ?? this.wFunc;
    this.iFunc = iFunc ?? this.iFunc;
    this.wInit = wInit ?? this.wInit;

    if (biased)
    {
        this.biases = this.wInit == null
            ? Utils.WeightInitializers.InitZero.Initialize(this.length)
            : this.wInit.Initialize(this.length);
    }
}
/// <summary>
/// Re-seeds every kernel weight from the initializer, scaled by 1/sqrt(fan-in)
/// where fan-in is the total number of weight elements.
/// </summary>
public void RandomizeWeights(IWeightInitializer weightInitializer)
{
    int fanIn = Weights.GetLength(0) * Weights.GetLength(1) * Weights.GetLength(2);
    double scale = Math.Sqrt(1.0 / fanIn);
    Weights.ForEach((i, j, k) => Weights[i, j, k] = weightInitializer.GenerateRandom(scale));
}
/// <summary>
/// Initializes a new instance of the <see cref="Dense"/> class.
/// </summary>
/// <param name="units">Positive integer, dimensionality of the output space.</param>
/// <param name="activation">The activation function to use (null for linear).</param>
/// <param name="use_bias">Whether the layer uses a bias vector.</param>
/// <param name="kernel_initializer">Kernel initializer; defaults to Glorot uniform.</param>
/// <param name="bias_initializer">Bias initializer; defaults to zeros.</param>
/// <param name="input_dim">The input dim.</param>
/// <param name="input_shape">The input shape.</param>
/// <param name="batch_input_shape">The batch input shape.</param>
public Dense(int units, IActivationFunction activation = null, bool use_bias = true, IWeightInitializer kernel_initializer = null, IWeightInitializer bias_initializer = null, IWeightRegularizer kernel_regularizer = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null, IWeightConstraint kernel_constraint = null, IWeightConstraint bias_constraint = null, int? input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null)
    : base(input_dim: input_dim, input_shape: input_shape, batch_input_shape: batch_input_shape)
{
    this.units = units;
    this.activation = activation;
    this.use_bias = use_bias;
    // Defaults mirror Keras: Glorot-uniform kernel, zero biases.
    this.kernel_initializer = kernel_initializer ?? new GlorotUniform();
    this.bias_initializer = bias_initializer ?? new Zeros();
    this.kernel_regularizer = kernel_regularizer;
    this.bias_regularizer = bias_regularizer;
    this.activity_regularizer = activity_regularizer;
    this.kernel_constraint = kernel_constraint;
    this.bias_constraint = bias_constraint;
    this.input_spec.Add(new InputSpec(min_ndim: 2));
    this.supports_masking = true;
}
/// <summary>
/// Wires up the backpropagation pipeline for a perceptron under training.
/// The learning rate is validated up front (CheckLearningRate throws before any
/// state is touched); then a weight initializer, an error-value back propagator
/// and a weight-change back propagator are all bound to the perceptron's network.
/// </summary>
/// <param name="weightSetter">Applies initial weights to the network.</param>
/// <param name="weightChangeApplier">Applies computed weight deltas during training.</param>
/// <param name="perceptronUnderTraining">The perceptron whose network is trained.</param>
/// <param name="learningRate">Step size; validated by CheckLearningRate.</param>
/// <param name="momentum">Momentum term forwarded to the weight-change propagator.</param>
protected ErrorBackPropagationStepsBase(IWeightSetter weightSetter, IWeightChangeApplier weightChangeApplier, IPerceptronUnderTraining perceptronUnderTraining, double learningRate, double momentum) { CheckLearningRate(learningRate); Perceptron = perceptronUnderTraining; _weightInitializer = new WeightInitializer(Perceptron.Network, weightSetter: weightSetter); _errorValueBackPropagator = new ErrorValueBackPropagator(Perceptron.Network); _weightChangeBackPropagator = new WeightChangeBackPropagator(weightChangeApplier, Perceptron.Network, learningRate, momentum); }
/// <summary>
/// Stores the collaborating factories used to assemble networks.
/// (The bias-initializer field name carries a pre-existing typo: _biasInitiliazer.)
/// </summary>
private NeuralNetworkFactory(ISomaFactory somaFactory, IAxonFactory axonFactory, ISynapseFactory hiddenSynapseFactory, ISynapseFactory inputOutputSynapseFactory, IWeightInitializer biasInitializer)
{
    _somaFactory = somaFactory;
    _axonFactory = axonFactory;
    _hiddenSynapseFactory = hiddenSynapseFactory;
    _inputOutputSynapseFactory = inputOutputSynapseFactory;
    _biasInitiliazer = biasInitializer;
}
/// <summary>
/// Creates one RBM weight set per adjacent layer pair: for N layers there are
/// N-1 weight sets, each sized by the neuron counts of the layers it connects.
/// </summary>
public AutoencoderWeights(int PNumLayers, RBMLayer[] PLayers, IWeightInitializer PWInitializer)
{
    numweightsets = PNumLayers - 1;
    weights = new RBMWeightSet[numweightsets];
    for (int setIndex = 0; setIndex < numweightsets; setIndex++)
    {
        weights[setIndex] = new RBMWeightSet(PLayers[setIndex].Count, PLayers[setIndex + 1].Count, PWInitializer);
    }
}
/// <summary>
/// Assigns a fresh bias from the initializer to every neuron in every layer.
/// </summary>
private void InitBiases(IWeightInitializer PWInitializer)
{
    for (int layerIndex = 0; layerIndex < numlayers; layerIndex++)
    {
        int neuronCount = layers[layerIndex].Count;
        for (int neuronIndex = 0; neuronIndex < neuronCount; neuronIndex++)
        {
            layers[layerIndex].SetBias(neuronIndex, PWInitializer.InitializeBias());
        }
    }
}
/// <summary>
/// Builds the visible-to-hidden weight set and then assigns every weight a
/// Gaussian N(0, 0.1) draw.
/// NOTE(review): the weights produced by <paramref name="PWeightInit"/> inside
/// the RBMWeightSet constructor are immediately overwritten by the Gaussian
/// loop below, so the initializer parameter appears to be effectively ignored
/// here — confirm this double initialization is intentional.
/// </summary>
private void InitWeights(IWeightInitializer PWeightInit) { weights = new RBMWeightSet(numvisibles, numhiddens, PWeightInit); for (int i = 0; i < numvisibles; i++) { for (int j = 0; j < numhiddens; j++) { weights.SetWeight(i, j, Utility.NextGaussian(0, 0.1)); } } }
/// <summary>
/// Assembles an autoencoder from pre-built RBM layers: snapshots the layer list
/// into an array, keeps the learning-rate schedule, creates separate recognition
/// and generative weight sets (both seeded by the same initializer), registers an
/// empty error-observer list, then seeds biases and per-training-run state.
/// </summary>
/// <param name="PLayers">The stacked RBM layers, input first.</param>
/// <param name="PTrainingInfo">Learning-rate configuration for training.</param>
/// <param name="PWInitializer">Initializer used for both weight sets and biases.</param>
internal Autoencoder(List <RBMLayer> PLayers, AutoencoderLearningRate PTrainingInfo , IWeightInitializer PWInitializer) { numlayers = PLayers.Count; layers = PLayers.ToArray(); learnrate = PTrainingInfo; recognitionweights = new AutoencoderWeights(numlayers, layers, PWInitializer); generativeweights = new AutoencoderWeights(numlayers, layers, PWInitializer); errorobservers = new List <IErrorObserver>(); InitBiases(PWInitializer); InitTrainingData(); }
/// <summary>
/// Builds a <see cref="Layer"/> from a parse-tree "create layer" node.
/// The node's key/value pairs supply the layer id (mandatory) plus optional
/// length, biased flag, and named activation/weight/input functions and weight
/// initializer, each resolved by reflection against the corresponding static
/// holder class (e.g. ActivationFunctions).
/// </summary>
/// <param name="node">Parse-tree node whose first child holds the key/value pairs.</param>
/// <returns>A (typeof(Layer), layer-instance) tuple for the evaluator.</returns>
/// <exception cref="InvalidOperationException">When no "id" parameter is present.</exception>
private Tuple <Type, object> EvaluateCreateLayer(ParseTreeNode node)
{
    var layerParams = EvaluateKeyValuePair(node.ChildNodes[0]);

    // "id" is the only mandatory parameter; fail fast before parsing anything else.
    if (!layerParams.ContainsKey("id"))
    {
        throw new InvalidOperationException("ID is necessary to initialize a layer!");
    }

    int length = 1;
    bool biased = true;
    IActivationFunction aFunc = null;
    IInputFunction iFunc = null;
    IWeightFunction wFunc = null;
    IWeightInitializer wInit = null;

    // TryGetValue avoids the ContainsKey + indexer double lookup of the original.
    string raw;
    if (layerParams.TryGetValue("length", out raw))
    {
        length = Int32.Parse(raw);
    }
    if (layerParams.TryGetValue("biased", out raw))
    {
        biased = bool.Parse(raw);
    }
    // Each named component is looked up as a public static field on its holder type.
    if (layerParams.TryGetValue("activationFunction", out raw))
    {
        aFunc = (IActivationFunction)typeof(ActivationFunctions).GetField(raw).GetValue(null);
    }
    if (layerParams.TryGetValue("weightFunction", out raw))
    {
        wFunc = (IWeightFunction)typeof(WeightFunctions).GetField(raw).GetValue(null);
    }
    if (layerParams.TryGetValue("inputFunction", out raw))
    {
        iFunc = (IInputFunction)typeof(InputFunctions).GetField(raw).GetValue(null);
    }
    if (layerParams.TryGetValue("weightInitializer", out raw))
    {
        wInit = (IWeightInitializer)typeof(WeightInitializers).GetField(raw).GetValue(null);
    }

    var layer = Layer.Create(layerParams["id"], length: length, aFunc: aFunc, wFunc: wFunc, iFunc: iFunc, wInit: wInit, biased: biased);
    return new Tuple <Type, object>(typeof(Layer), layer);
}
/// <summary>
/// Initializes a new instance of the <see cref="Dense"/> class from an
/// activation name: the string is resolved via Activation.Create and all other
/// arguments are forwarded unchanged to the IActivationFunction-based constructor.
/// </summary>
/// <param name="units">Positive integer, dimensionality of the output space.</param>
/// <param name="activation">Name of the activation function to use.</param>
/// <param name="use_bias">Whether the layer uses a bias vector.</param>
/// <param name="input_dim">The input dim.</param>
/// <param name="input_shape">The input shape.</param>
/// <param name="batch_input_shape">The batch input shape.</param>
public Dense(int units, string activation, bool use_bias = true, IWeightInitializer kernel_initializer = null, IWeightInitializer bias_initializer = null, IWeightRegularizer kernel_regularizer = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null, IWeightConstraint kernel_constraint = null, IWeightConstraint bias_constraint = null, int?input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null) : this(units : units, activation : Activation.Create(activation), use_bias : use_bias, kernel_initializer : kernel_initializer, bias_initializer : bias_initializer, kernel_regularizer : kernel_regularizer, bias_regularizer : bias_regularizer, activity_regularizer : activity_regularizer, kernel_constraint : kernel_constraint, bias_constraint : bias_constraint, input_dim : input_dim, input_shape : input_shape, batch_input_shape : batch_input_shape) { }
/// <summary>
/// Creates a neuron with one weight and one learning-rate annealer per incoming
/// connection, plus a separate annealer for the bias. Weights are drawn from the
/// initializer scaled by 1/sqrt(connection count).
/// </summary>
public Neuron( IActivator activator, IWeightInitializer weightInitializer, int numberOfConnections, LearningRateAnnealerType lrat)
{
    Weights = new double[numberOfConnections];
    _learningRateAnnealers = new ILearningRateAnnealer[numberOfConnections];
    for (int c = 0; c < numberOfConnections; c++)
    {
        _learningRateAnnealers[c] = LearningRateAnnealerFactory.Produce(lrat);
    }
    _biasLearningRateAnnealer = LearningRateAnnealerFactory.Produce(lrat);
    double magnitude = 1 / Math.Sqrt(numberOfConnections);
    for (int c = 0; c < numberOfConnections; c++)
    {
        Weights[c] = weightInitializer.GenerateRandom(magnitude);
    }
    _activator = activator;
}
/// <summary>
/// Allocates a pre-by-post jagged weight matrix plus a parallel weight-change
/// matrix. Changes start at zero; weights come from the supplied initializer.
/// </summary>
public RBMWeightSet(int PPreSynapticLayerSize, int PPostSynapticLayerSize, IWeightInitializer PWeightInit)
{
    presize = PPreSynapticLayerSize;
    postsize = PPostSynapticLayerSize;
    weights = new double[presize][];
    weightchanges = new double[presize][];
    for (int pre = 0; pre < presize; pre++)
    {
        weights[pre] = new double[postsize];
        weightchanges[pre] = new double[postsize];
        Utility.ZeroArray(weightchanges[pre]);
        for (int post = 0; post < postsize; post++)
        {
            weights[pre][post] = PWeightInit.InitializeWeight();
        }
    }
}
/// <summary>
/// Initializes this container's weights exactly once (guarded by
/// isWeightInitialized), then recurses into every output layer that is itself a
/// LayerContainer so the whole downstream graph is initialized.
/// </summary>
/// <param name="weightInitializer">Initializer forwarded to each weight-initializable layer.</param>
public void InitializeWeights(IWeightInitializer weightInitializer)
{
    if (!isWeightInitialized)
    {
        isWeightInitialized = true;
        // Single 'as' cast replaces the original 'is' test followed by a second
        // 'as' cast, which checked the type twice.
        var weightInitializable = CurrentLayer as IWeightInitializable;
        if (weightInitializable != null)
        {
            weightInitializable.SetWeights(weightInitializer);
        }
    }
    for (int i = 0; i < OutputLayers.Count; i++)
    {
        var container = OutputLayers[i] as LayerContainer;
        if (container != null)
        {
            container.InitializeWeights(weightInitializer);
        }
    }
}
/// <summary>
/// Populates the weight matrix and the bias vector from the initializer.
/// Weights are written element-by-element directly into Weights.Memory; biases
/// are staged in a buffer and committed with a single Biases.Write call.
/// (Removed: an unused staging array and dead commented-out code from the original.)
/// </summary>
/// <param name="weightInitializer">Supplies per-element weights (given the matrix
/// dimensions) and bias values.</param>
public void SetWeights(IWeightInitializer weightInitializer)
{
    int weightCount = Weights.Columns * Weights.Rows;
    for (int j = 0; j < weightCount; j++)
    {
        Weights.Memory[j] = (float)weightInitializer.GetWeight(Weights.Columns, Weights.Rows);
    }

    float[] biasValues = new float[Biases.Rows];
    for (int i = 0; i < biasValues.Length; i++)
    {
        biasValues[i] = weightInitializer.GetBias();
    }
    Biases.Write(biasValues);
}
/// <summary>
/// Composes a backpropagation-capable network factory: derivative-aware axons,
/// weight-initialized hidden synapses, constant-weight (1.0) input synapses, and
/// backprop-decorated neurons, all wrapped in a BackpropagationNetworkFactoryDecorator.
/// </summary>
public INeuralNetworkFactory BuildBackpropagationNetworkFactory(IWeightInitializer weightInitializer, ISomaFactory somaFactory, IActivationFunctionDerivative activationFunctionDerivative, IActivationFunction inputActivationFunction, INeuronFactory neuronFactory)
{
    var axons = BackpropagationAxonFactory.GetInstance(activationFunctionDerivative);
    var hiddenSynapses = DecoratedSynapseFactory.GetInstance(weightInitializer, AxonFactory.GetInstance(activationFunctionDerivative));
    // Input synapses use a fixed weight of 1.0 so inputs pass through unscaled.
    var inputSynapses = DecoratedSynapseFactory.GetInstance(new ConstantWeightInitializer(1.0), AxonFactory.GetInstance(inputActivationFunction));
    var neurons = BackpropagationNeuronFactory.GetInstance(neuronFactory);
    INeuralNetworkFactory coreFactory = NeuralNetworkFactory.GetInstance(somaFactory, axons, hiddenSynapses, inputSynapses, weightInitializer, neurons);
    return new BackpropagationNetworkFactoryDecorator(coreFactory);
}
/// <summary>
/// Builds a fully connected layer of <paramref name="numberOfNeurons"/> neurons,
/// each wired to every neuron of the previous layer with the given initializer
/// and learning-rate annealer type.
/// </summary>
public FullyConnectedLayer( IActivator activator, int numberOfNeurons, int numberOfNeuronsInPreviouseLayer, int layerIndex, IWeightInitializer weightInitializer, LearningRateAnnealerType lrat) : base(layerIndex)
{
    _numberOfNeuronsInPreviouseLayer = numberOfNeuronsInPreviouseLayer;
    var built = new List<Neuron>(numberOfNeurons);
    for (int n = 0; n < numberOfNeurons; n++)
    {
        built.Add(new Neuron(activator, weightInitializer, numberOfNeuronsInPreviouseLayer, lrat));
    }
    Neurons = new List<Neuron>(built);
}
/// <summary>
/// Builds a convolutional layer of <paramref name="nk"/> kernels of size
/// <paramref name="ks"/>, randomizes each kernel's weights, then derives the
/// output filter metadata and allocates the feature-map volume.
/// </summary>
public ConvolutionalLayer(int nk, int ks, int li, FilterMeta ifm, IWeightInitializer wi, LearningRateAnnealerType lrat) : base(li, ifm)
{
    _numberOfKernels = nk;
    _kernelSize = ks;
    var kernels = new List<Kernel>(nk);
    for (int kernelIndex = 0; kernelIndex < nk; kernelIndex++)
    {
        var kernel = new Kernel(ks, ifm.Channels, lrat);
        kernel.RandomizeWeights(wi);
        kernels.Add(kernel);
    }
    _kernels = new List<Kernel>(kernels);
    _inputeFm = ifm;
    _outputFm = GetOutputFilterMeta();
    _featureMaps = new double[_outputFm.Channels, _outputFm.Size, _outputFm.Size];
}
/// <summary>
/// Main Conv2D constructor: forwards all arguments to the rank-2 base
/// convolution constructor, then constrains the input spec to 4-D tensors
/// (batch, spatial x2, channels).
/// </summary>
public Conv2D(int filters, int[] kernel_size = null, int[] strides = null, PaddingType padding = PaddingType.Valid, DataFormatType?data_format = null, int[] dilation_rate = null, IActivationFunction activation = null, bool use_bias = true, IWeightInitializer kernel_initializer = null, IWeightInitializer bias_initializer = null, IWeightRegularizer kernel_regularizer = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null, IWeightConstraint kernel_constraint = null, IWeightConstraint bias_constraint = null, int?[] input_shape = null) : base(rank: 2, filters: filters, kernel_size: kernel_size, strides: strides, padding: padding, data_format: data_format, dilation_rate: dilation_rate, activation: activation, use_bias: use_bias, kernel_initializer: kernel_initializer, bias_initializer: bias_initializer, kernel_regularizer: kernel_regularizer, bias_regularizer: bias_regularizer, activity_regularizer: activity_regularizer, kernel_constraint: kernel_constraint, bias_constraint: bias_constraint, input_shape: input_shape) { this.input_spec = new List <InputSpec> { new InputSpec(ndim: 4) }; }
/// <summary>
/// Initializes a new instance of the <see cref="Dense"/> class.
/// </summary>
/// <param name="units">Positive integer, dimensionality of the output space.</param>
/// <param name="input_dim">The input dim.</param>
/// <param name="batch_input_shape">The batch input shape.</param>
/// <param name="input_shape">The input shape.</param>
/// <param name="activation">The activation function to use.</param>
/// <param name="use_bias">Whether the layer uses a bias vector.</param>
/// <param name="spectral_norm_iteration">Number of power-iteration steps for spectral
/// normalization; 0 disables it.</param>
public Dense(int units, IActivationFunction activation = null, bool use_bias = true, IWeightInitializer kernel_initializer = null, IWeightInitializer bias_initializer = null, IWeightRegularizer kernel_regularizer = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null, IWeightConstraint kernel_constraint = null, IWeightConstraint bias_constraint = null, int?input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null, int spectral_norm_iteration = 0) : base(input_dim: input_dim, input_shape: input_shape, batch_input_shape: batch_input_shape) { // https://github.com/fchollet/keras/blob/f65a56fb65062c8d14d215c9f4b1015b97cc5bf3/keras/layers/core.py#L791
// Defaults mirror Keras: zero biases, Glorot-uniform kernel.
if (bias_initializer == null) { bias_initializer = new Zeros(); } if (kernel_initializer == null) { kernel_initializer = new GlorotUniform(); } this.units = units; this.activation = activation; this.use_bias = use_bias; this.kernel_initializer = kernel_initializer; this.bias_initializer = bias_initializer; this.kernel_regularizer = kernel_regularizer; this.bias_regularizer = bias_regularizer; this.activity_regularizer = activity_regularizer; this.kernel_constraint = kernel_constraint; this.bias_constraint = bias_constraint; this.input_spec = new List <InputSpec>(); 
this.input_spec.Add(new InputSpec(min_ndim: 2)); this.supports_masking = true; this.spectral_norm_iteration = spectral_norm_iteration; }
/// <summary>
/// Factory entry point: builds a SynapseFactory around the given weight
/// initializer and axon factory.
/// </summary>
public static ISynapseFactory GetInstance(IWeightInitializer weightInitializer, IAxonFactory axonFactory)
{
    var factory = new SynapseFactory(weightInitializer, axonFactory);
    return factory;
}
/// <summary>
/// Create a network with random weights and biases.
/// </summary>
/// <param name="layerConfig">Neuron counts per layer in order:
/// [input][hidden 1]...[hidden n][output].</param>
/// <param name="activationFunction">The activation function to use.</param>
/// <param name="weightInitializer">The weight and bias initializer to use;
/// a DefaultWeightInitializer is used when null.</param>
/// <returns>The constructed network.</returns>
public static Network CreateNetworkInitRandom(int[] layerConfig, IActivationFunction activationFunction, IWeightInitializer weightInitializer = null)
{
    weightInitializer = weightInitializer ?? new DefaultWeightInitializer();

    var inputLayers = new List<List<Tuple<List<float>, float>>>();
    for (int layerIndex = 1; layerIndex < layerConfig.Length; ++layerIndex)
    {
        int fanIn = layerConfig[layerIndex - 1];
        int neuronCount = layerConfig[layerIndex];
        var neuronList = new List<Tuple<List<float>, float>>();
        for (int neuron = 0; neuron < neuronCount; neuron++)
        {
            var inboundWeights = new List<float>();
            for (int w = 0; w < fanIn; ++w)
            {
                inboundWeights.Add(weightInitializer.GetRandomWeight(fanIn));
            }
            neuronList.Add(new Tuple<List<float>, float>(inboundWeights, weightInitializer.GetRandomBias()));
        }
        inputLayers.Add(neuronList);
    }
    return CreateNetwork(inputLayers, activationFunction);
}
/// <summary>
/// Factory entry point: builds a NeuralNetworkFactory from all collaborating
/// factories, including an explicit neuron factory.
/// </summary>
public static NeuralNetworkFactory GetInstance(ISomaFactory somaFactory, IAxonFactory axonFactory, ISynapseFactory hiddenSynapseFactory, ISynapseFactory inputOutputSynapseFactory, IWeightInitializer biasInitializer, INeuronFactory neuronFactory)
{
    var factory = new NeuralNetworkFactory(somaFactory, axonFactory, hiddenSynapseFactory, inputOutputSynapseFactory, biasInitializer, neuronFactory);
    return factory;
}
/// <summary>
/// Factory entry point: builds a SynapseFactory around the given weight
/// initializer and axon factory.
/// </summary>
public static ISynapseFactory GetInstance(IWeightInitializer weightInitializer, IAxonFactory axonFactory)
{
    ISynapseFactory factory = new SynapseFactory(weightInitializer, axonFactory);
    return factory;
}
/// <summary>
/// Factory entry point: builds a NeuralNetworkFactory from the collaborating
/// factories (neuron factory defaulted by the constructor overload used).
/// </summary>
public static NeuralNetworkFactory GetInstance(ISomaFactory somaFactory, IAxonFactory axonFactory, ISynapseFactory hiddenSynapseFactory, ISynapseFactory inputOutputSynapseFactory, IWeightInitializer biasInitializer)
{
    var factory = new NeuralNetworkFactory(somaFactory, axonFactory, hiddenSynapseFactory, inputOutputSynapseFactory, biasInitializer);
    return factory;
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// <summary>
/// Initializes a new instance of the Autoencoders.AutoencoderWeights class.
/// Creates one weight set per adjacent layer pair (N layers yield N-1 sets),
/// each sized by the neuron counts of the layers it connects.
/// </summary>
///
/// <param name="PNumLayers">    Number of layers. </param>
/// <param name="PLayers">       The layers. </param>
/// <param name="PWInitializer"> The weight initializer. </param>
////////////////////////////////////////////////////////////////////////////////////////////////////
public AutoencoderWeights(int PNumLayers, RestrictedBoltzmannMachineLayer[] PLayers, IWeightInitializer PWInitializer) { numweightsets = PNumLayers - 1; weights = new RestrictedBoltzmannMachineWeightSet[numweightsets]; for (int i = 0; i < numweightsets; i++) { weights[i] = new RestrictedBoltzmannMachineWeightSet(PLayers[i].Count, PLayers[i + 1].Count, PWInitializer); } }
/// <summary>
/// Stores the weight initializer and axon factory this synapse factory uses.
/// </summary>
private SynapseFactory(IWeightInitializer weightInitializer, IAxonFactory axonFactory)
{
    _weightInitializer = weightInitializer;
    _axonFactory = axonFactory;
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// <summary>
/// Initializes a new instance of the Autoencoders.RestrictedBoltzmannMachineWeightSet class.
/// Allocates a pre-by-post jagged weight matrix and a parallel weight-change matrix;
/// changes start at zero and weights come from the supplied initializer.
/// </summary>
///
/// <param name="PPreSynapticLayerSize">  Size of the pre synaptic layer. </param>
/// <param name="PPostSynapticLayerSize"> Size of the post synaptic layer. </param>
/// <param name="PWeightInit">            The weight initializer. </param>
////////////////////////////////////////////////////////////////////////////////////////////////////
public RestrictedBoltzmannMachineWeightSet(int PPreSynapticLayerSize, int PPostSynapticLayerSize, IWeightInitializer PWeightInit) { preSize = PPreSynapticLayerSize; postSize = PPostSynapticLayerSize; weights = new double[preSize][]; weightChanges = new double[preSize][]; for (int i = 0; i < preSize; i++) { weights[i] = new double[postSize]; weightChanges[i] = new double[postSize]; Utility.SetArrayToZero(weightChanges[i]); for (int j = 0; j < postSize; j++) { weights[i][j] = PWeightInit.InitializeWeight(); } } }
/// <summary>
/// Factory entry point: builds a Mutator over the given network factory,
/// weight initializer and mutation settings.
/// </summary>
public static IMutator GetInstance(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer, MutationConfigurationSettings config)
{
    IMutator mutator = new Mutator(networkFactory, weightInitializer, config);
    return mutator;
}
/// <summary>
/// Factory entry point: builds a BreederFactory over the given network factory
/// and weight initializer.
/// </summary>
public static IBreederFactory GetInstance(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer)
{
    IBreederFactory factory = new BreederFactory(networkFactory, weightInitializer);
    return factory;
}
/// <summary>
/// Stores the network factory and weight initializer used when breeding networks.
/// </summary>
private BreederFactory(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer)
{
    _networkFactory = networkFactory;
    _weightInitializer = weightInitializer;
}