// Observers not implemented:
// public override MyOutputView CreateView() => new MyConvolutionLayerView(m_network, this, 0xFFFFDDDD);

/// <summary>
/// Creates a convolution layer with the given kernel geometry and stride.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="featuresCount">Number of feature maps when fully connected; ignored when <paramref name="featureInputs"/> is supplied.</param>
/// <param name="kernelWidth">Convolution kernel width.</param>
/// <param name="kernelHeight">Convolution kernel height.</param>
/// <param name="xStride">Horizontal stride (default 1).</param>
/// <param name="yStride">Vertical stride (default 1).</param>
/// <param name="featureInputs">Optional selective wiring: one entry per feature map, listing its input features. When null, the layer is fully connected.</param>
/// <param name="initialWeight">Optional explicit initial weights; null means default initialization.</param>
/// <param name="initialBias">Optional explicit initial biases; null means default initialization.</param>
public MyConvolutionLayer(MyAbstractFeedForwardNode network, uint featuresCount, uint kernelWidth, uint kernelHeight, uint xStride = 1, uint yStride = 1, uint[][] featureInputs = null, float[] initialWeight = null, float[] initialBias = null)
    : base(network)
{
    if (featureInputs == null)
    {
        // Full connection: the feature-map count comes from the explicit parameter.
        m_output.Nb = featuresCount;
    }
    else
    {
        // Selective connection: one feature map per wiring entry; featuresCount is ignored.
        m_output.Nb = featureInputs.Length;
        FeatureInputs = featureInputs;
    }

    m_weight.Width = kernelWidth;
    m_weight.Height = kernelHeight;
    XStride = xStride;
    YStride = yStride;
    m_initialWeight = initialWeight;
    m_initialBias = initialBias;
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyWeightView(m_network, this, 0xFFD8BD99);

/// <summary>
/// Creates a linear (affine) layer, optionally seeded with explicit weights and biases.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="initialWeights">Optional explicit initial weights; null means default initialization.</param>
/// <param name="initialBias">Optional explicit initial biases; null means default initialization.</param>
public MyLinearLayer(MyAbstractFeedForwardNode network, float[] initialWeights = null, float[] initialBias = null)
    : base(network)
{
    m_initialWeight = initialWeights;
    m_initialBias = initialBias;
}
/// <summary>
/// Creates a gradient back-propagation agent: loads the weight-update CUDA kernel
/// and wires a label-based delta provider to the supplied label input block.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="nGPU">GPU ordinal used to load the kernel.</param>
/// <param name="labelInput">Memory block holding the target labels used to compute output deltas.</param>
public MyGradientBackPropAgent(MyAbstractFeedForwardNode network, int nGPU, MyMemoryBlock<float> labelInput)
    : base(network)
{
    m_updateWeightKernel = MyKernelFactory.Instance.Kernel(nGPU, @"XmlFeedForwardNet\UpdateWeightKernel");

    DeltaProvider = new MyLabelDeltaProvider(m_network, nGPU)
    {
        LabelInput = labelInput
    };
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyWeightView(m_network, this, 0xCCAACCCC);

/// <summary>
/// Creates a mirror of an existing neuron layer (used for decoder/untied-weight setups).
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="originalLayer">The neuron layer this layer mirrors.</param>
/// <param name="initialWeights">Optional explicit initial weights; null means default initialization.</param>
public MyMirrorNeuronLayer(MyAbstractFeedForwardNode network, MyNeuronLayer originalLayer, float[] initialWeights = null)
    : base(network)
{
    m_originalLayer = originalLayer;
    m_initialWeights = initialWeights;
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyWeightView(m_network, this);

/// <summary>
/// Base constructor for layers that carry trainable weights and biases.
/// Fixes all weight/bias memory blocks to a depth of 1.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
public MyAbstractWeightLayer(MyAbstractFeedForwardNode network)
    : base(network)
{
    m_weight.Depth = 1;
    m_weightChange.Depth = 1;
    m_bias.Depth = 1;
    m_biasChange.Depth = 1;
}
/// <summary>
/// Creates an input layer that reads from a slice of an external memory block.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="input">External memory block the layer reads from.</param>
/// <param name="offset">Element offset into <paramref name="input"/> where this layer's data begins.</param>
/// <param name="nb">Number of feature maps in the output.</param>
/// <param name="width">Output width.</param>
/// <param name="height">Output height.</param>
/// <param name="nbSamplesPerStep">Number of samples consumed per simulation step.</param>
public MyInputLayer(MyAbstractFeedForwardNode network, MyMemoryBlock<float> input, SizeT offset, SizeT nb, SizeT width, SizeT height, SizeT nbSamplesPerStep)
    : base(network)
{
    m_inputBlock = input;
    m_inputOffset = offset;

    m_output.Nb = nb;
    m_output.Width = width;
    m_output.Height = height;

    m_nbSamplesPerStep = nbSamplesPerStep;
}
/// <summary>
/// Creates a fully-connected neuron layer with a 1x1 spatial output of
/// <paramref name="neuronsCount"/> feature maps.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="neuronsCount">Number of neurons (output feature maps).</param>
/// <param name="initialWeights">Optional explicit initial weights; null means default initialization.</param>
/// <param name="initialBias">Optional explicit initial biases; null means default initialization.</param>
public MyNeuronLayer(MyAbstractFeedForwardNode network, uint neuronsCount, float[] initialWeights = null, float[] initialBias = null)
    : base(network)
{
    m_neuronsCount = neuronsCount;

    // Dense layer output is a flat vector: neuronsCount maps of 1x1.
    m_output.Nb = m_neuronsCount;
    m_output.Width = 1;
    m_output.Height = 1;

    m_initialWeight = initialWeights;
    m_initialBias = initialBias;
}
/// <summary>
/// Creates a neuron-copy layer with a 1x1 spatial output of
/// <paramref name="neuronsCount"/> feature maps.
/// NOTE(review): the <paramref name="initialWeights"/> and <paramref name="initialBias"/>
/// parameters are accepted for signature parity with <c>MyNeuronLayer</c> but are NOT used:
/// weights get default initialization and biases are forced to all-zero.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="neuronsCount">Number of neurons (output feature maps).</param>
/// <param name="initialWeights">Ignored; weights always use default initialization.</param>
/// <param name="initialBias">Ignored; biases are always initialized to zero.</param>
public MyNeuronCopyLayer(MyAbstractFeedForwardNode network, uint neuronsCount, float[] initialWeights = null, float[] initialBias = null)
    : base(network)
{
    m_neuronsCount = neuronsCount;

    m_output.Nb = m_neuronsCount;
    m_output.Width = 1;
    m_output.Height = 1;

    // Deliberately override the caller-supplied values: default weights, zero biases.
    m_initialWeight = null;
    m_initialBias = new float[m_output.Count];
}
/// <summary>
/// Creates an RBM training agent. Collects every neuron and neuron-copy layer from the
/// network and derives the total number of training steps from the learning duration
/// times the number of layer transitions (layers.Count - 1).
/// NOTE(review): <paramref name="nGPU"/> and <paramref name="labelInput"/> are not used in
/// this constructor — presumably consumed elsewhere in the class; verify.
/// NOTE(review): if MyNeuronCopyLayer derives from MyNeuronLayer, a copy layer would match
/// both casts below and be added twice — confirm the intended hierarchy.
/// </summary>
/// <param name="network">Owning feed-forward network node whose layers are scanned.</param>
/// <param name="nGPU">GPU ordinal (unused here).</param>
/// <param name="labelInput">Label memory block (unused here).</param>
/// <param name="learningDuration">Number of learning steps per layer transition.</param>
public MyRBMAgent(MyAbstractFeedForwardNode network, int nGPU, MyMemoryBlock<float> labelInput, uint learningDuration)
    : base(network)
{
    layers = new List<MyAbstractFBLayer>();

    foreach (MyAbstractFBLayer layer in network.Layers)
    {
        MyNeuronLayer neuronLayer = layer as MyNeuronLayer;
        if (neuronLayer != null)
        {
            layers.Add(neuronLayer);
        }

        MyNeuronCopyLayer copyLayer = layer as MyNeuronCopyLayer;
        if (copyLayer != null)
        {
            layers.Add(copyLayer);
        }
    }

    // One training phase per adjacent layer pair.
    totalSteps = (int)learningDuration * (layers.Count - 1);
}
/// <summary>
/// Base constructor for delta providers; records the owning network.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
public MyDeltaProvider(MyAbstractFeedForwardNode network)
{
    m_network = network;
}
/// <summary>
/// Creates an activation layer applying the given activation function element-wise.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="activationFunction">Activation to apply; defaults to no activation (identity).</param>
public MyActivationLayer(MyAbstractFeedForwardNode network, MyActivationFunction activationFunction = MyActivationFunction.NO_ACTIVATION)
    : base(network)
{
    ActivationFunction = activationFunction;
}
/// <summary>
/// Base constructor for back-propagation agents; records the owning network.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
public MyBackPropAgent(MyAbstractFeedForwardNode network)
{
    m_network = network;
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyDeltaView(m_network, this);

/// <summary>
/// Base constructor for layers participating in the backward pass.
/// Fixes the delta memory block to a depth of 1.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
public MyAbstractFBLayer(MyAbstractFeedForwardNode network)
    : base(network)
{
    m_delta.Depth = 1;
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyDeltaView(m_network, this, 0xFFFFFFDD);

/// <summary>
/// Creates a softmax layer; all configuration is handled by the base class.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
public MySoftmaxLayer(MyAbstractFeedForwardNode network)
    : base(network)
{
}
/// <summary>
/// Creates a delta provider that derives output deltas from target labels.
/// Loads the vector-combine and energy CUDA kernels for the given GPU.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="nGPU">GPU ordinal used to load the kernels.</param>
public MyLabelDeltaProvider(MyAbstractFeedForwardNode network, int nGPU)
    : base(network)
{
    m_combineKernel = MyKernelFactory.Instance.Kernel(nGPU, @"Common\CombineVectorsKernel", "CombineTwoVectorsKernel");
    m_energyKernel = MyKernelFactory.Instance.Kernel(nGPU, @"XmlFeedForwardNet\EnergyKernel");
}
// Observers not implemented:
// public virtual MyOutputView CreateView() => new MyOutputView(m_network, this);

/// <summary>
/// Base constructor for all forward layers: records the owning network,
/// fixes the output block to a depth of 1, and starts with no extra buffer space.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
public MyAbstractFLayer(MyAbstractFeedForwardNode network)
{
    m_network = network;
    m_output.Depth = 1;
    m_extraSize = 0;
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyDeltaView(m_network, this, 0xDDBBBBDD);

/// <summary>
/// Creates a mirror (unpooling) counterpart of an existing pool layer.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="originalLayer">The pool layer this layer mirrors.</param>
public MyMirrorPoolLayer(MyAbstractFeedForwardNode network, MyPoolLayer originalLayer)
    : base(network)
{
    m_originalLayer = originalLayer;
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyDeltaView(m_network, this, 0xFFDDDDFF);

/// <summary>
/// Creates a pooling layer with the given stride and pooling rule.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="stride">Pooling stride (window step).</param>
/// <param name="poolRule">Rule used to aggregate each pooling window.</param>
public MyPoolLayer(MyAbstractFeedForwardNode network, uint stride, MyPoolRule poolRule)
    : base(network)
{
    Stride = stride;
    PoolRule = poolRule;
}
// Observers not implemented:
// public override MyOutputView CreateView() => new MyWeightView(m_network, this, 0xDDDDBBBB);

/// <summary>
/// Creates a mirror (deconvolution) counterpart of an existing convolution layer.
/// NOTE(review): unlike <c>MyMirrorNeuronLayer</c>, the <paramref name="initialWeights"/>
/// parameter is accepted but never stored here — confirm whether it should be assigned
/// to a backing field or removed from the signature.
/// </summary>
/// <param name="network">Owning feed-forward network node.</param>
/// <param name="originalLayer">The convolution layer this layer mirrors.</param>
/// <param name="initialWeights">Currently unused; see the note above.</param>
public MyMirrorConvolutionLayer(MyAbstractFeedForwardNode network, MyConvolutionLayer originalLayer, float[] initialWeights = null)
    : base(network)
{
    m_originalLayer = originalLayer;
}