/// <summary>
/// Computes the gradient of the cost with respect to this function's input,
/// given the gradient with respect to its output (chain rule through the activation).
/// </summary>
/// <param name="outputgradient">Gradient with respect to the output batch; must be non-null with dimension equal to NumberOfOutputs.</param>
/// <returns>The gradient with respect to the input batch.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="outputgradient"/> is null or has the wrong dimension.</exception>
public override VectorBatch InputGradient(VectorBatch outputgradient)
{
    if (outputgradient == null || outputgradient.Dimension != NumberOfOutputs)
    {
        throw new ArgumentException("outputgradient may not be null and must have dimension equal to NumberOfNeurons.");
    }

    // Identity activation: the gradient passes through unchanged.
    if (_neuralFunctionDerivative == null)
    {
        return outputgradient;
    }

    // f'(input, output), evaluated elementwise over the cached forward-pass batches.
    VectorBatch derivative = new VectorBatch(
        _inputBatch.AsMatrix().Map2((x, y) => _neuralFunctionDerivative(x, y), _outputBatch.AsMatrix()));

    // Chain rule: input gradient = derivative (elementwise *) output gradient.
    // BUG FIX: the previous implementation returned the bare derivative and ignored
    // outputgradient entirely; the commented-out code in the original showed this intent.
    return new VectorBatch(
        derivative.AsMatrix().Map2((d, g) => d * g, outputgradient.AsMatrix()));
}
/// <summary>
/// Back-propagates a batch of output gradients through the repeated component,
/// pairing each input segment with its matching gradient segment.
/// </summary>
public override void BackPropagate(VectorBatch outputgradient, VectorBatch input)
{
    foreach (BatchPair segmentPair in _segmentAndPair(input, outputgradient))
    {
        _component.BackPropagate(segmentPair.Second, segmentPair.First);
    }
}
/// <summary>
/// Runs the component forward on one training pair, accumulates the cost,
/// then back-propagates the cost gradient through the component.
/// </summary>
/// <param name="tv">Training pair: First is the input batch, Second is the target batch.</param>
protected void _runAndBackPropagate(BatchPair tv)
{
    VectorBatch predicted = _component.Run(tv.First);

    // Cost is accumulated before the gradient is computed (the cost function
    // keeps mutable working state, so the call order is preserved).
    _costAccumulator += _costFunction.Cost(tv.Second, predicted);

    VectorBatch costGradient = _costFunction.Gradient(tv.Second, predicted);
    _component.BackPropagate(costGradient);
}
/// <summary>
/// Pulls the output gradient back through the activation function.
/// With no activation (identity), the gradient passes through unchanged.
/// </summary>
public VectorBatch ActivationGradient(VectorBatch outputgradient)
{
    return _neuralFunction == null
        ? outputgradient
        : _neuralFunction.InputGradient(outputgradient);
}
/// <summary>
/// Creates a pass-through (identity) neural function over the given number of units.
/// Batch-mode input starts out unset; vector-mode input/output are pre-sized.
/// </summary>
/// <param name="numberofunits">Number of units (input and output dimension).</param>
public NeuralFunction(int numberofunits)
{
    _numberOfUnits = numberofunits;
    _neuralFunction = null;
    _inputBatch = null;
    _inputVector = new NetworkVector(numberofunits);
    _outputVector = new NetworkVector(numberofunits);
}
/// <summary>
/// Maps the output gradients back through the weights to produce input gradients.
/// </summary>
/// <param name="outputgradients">Non-null gradient batch with dimension NumberOfOutputs.</param>
/// <exception cref="ArgumentException">Thrown on null or mis-dimensioned input.</exception>
public override VectorBatch InputGradient(VectorBatch outputgradients)
{
    bool isValid = outputgradients != null && outputgradients.Dimension == NumberOfOutputs;
    if (!isValid)
    {
        throw new ArgumentException("outputgradient may not be null and must have dimension equal to NumberOfNeurons.");
    }

    return Weights.LeftMultiplyBy(outputgradients);
}
/// <summary>
/// Softmax cross-entropy style cost: exponentiates the batch row-wise, normalizes
/// each row to sum to one, and averages target-weighted log terms over all entries.
/// </summary>
/// <param name="target">Target batch (same shape as <paramref name="batch"/>).</param>
/// <param name="batch">Raw (pre-softmax) network output batch.</param>
/// <returns>The averaged cost value.</returns>
public override double Cost(VectorBatch target, VectorBatch batch)
{
    _workingVector = null;

    // Softmax: exponentiate, then L1-normalize each row (entries are all positive
    // after exp, so the 1-norm equals the row sum).
    // NOTE(review): exponentiation without subtracting the row max can overflow for
    // large inputs — confirm input range or add a max-shift as in the softmax Run().
    _workingBatchMatrix = batch.AsMatrix().Map(x => Math.Exp(x));
    _workingBatchMatrix = _workingBatchMatrix.NormalizeRows(1.0);

    // target * log(softmax), elementwise; dead local `sum` from the original removed
    // (its RowSums() result was never used).
    // NOTE(review): conventional cross-entropy carries a leading minus sign — verify
    // callers expect this sign convention before changing it.
    _workingBatchMatrix = target.AsMatrix().Map2((x, y) => Math.Log(y) * x, _workingBatchMatrix);
    return _workingBatchMatrix.RowSums().Sum() / (batch.Count * batch.Dimension);
}
/// <summary>
/// Input gradient for a logistic (sigmoid) unit: y * (1 - y) times the output
/// gradient, elementwise over the cached forward-pass output.
/// </summary>
/// <param name="outputgradients">Non-null gradient batch with dimension NumberOfOutputs.</param>
/// <exception cref="ArgumentException">Thrown on null or mis-dimensioned input.</exception>
public override VectorBatch InputGradient(VectorBatch outputgradients)
{
    if (outputgradients == null || outputgradients.Dimension != NumberOfOutputs)
    {
        throw new ArgumentException("outputgradient may not be null and must have dimension equal to the number of units.");
    }

    var gradientMatrix = _output.AsMatrix().Map2(
        (activation, gradient) => activation * (1 - activation) * gradient,
        outputgradients.AsMatrix());
    return new VectorBatch(gradientMatrix);
}
/// <summary>
/// Forward pass for the weighted combiner: result = input * W^T + biases
/// (biases broadcast across rows). Records the batch as the last input.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the input dimension does not match.</exception>
protected override VectorBatch _run(VectorBatch inputbatch)
{
    if (inputbatch.Dimension != NumberOfInputs)
    {
        throw new ArgumentException("The dimension of the input does not match this WeightedCombiner.");
    }

    // Switch to batch mode: clear any stale single-vector input.
    VectorInput = null;
    BatchInput = inputbatch;

    VectorBatch weighted = Weights.TransposeAndLeftMultiplyBy(inputbatch);
    weighted.AddVectorToEachRow(Biases);
    return weighted;
}
/// <summary>
/// Forward pass for the layer: linear combination first, then the optional
/// activation function.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the input dimension does not match.</exception>
protected override VectorBatch _run(VectorBatch inputbatch)
{
    if (inputbatch.Dimension != NumberOfInputs)
    {
        throw new ArgumentException("Input dimension does not match this Layer.");
    }

    VectorBatch combined = _combiner.Run(inputbatch);
    return _neuralFunction == null ? combined : _neuralFunction.Run(combined);
}
/// <summary>
/// Input gradient for the segmented component: each output-gradient segment is
/// mapped back through the shared weights, then the per-segment results are
/// concatenated back into one batch.
/// </summary>
/// <exception cref="ArgumentException">Thrown on null or mis-dimensioned input.</exception>
public override VectorBatch InputGradient(VectorBatch outputgradients)
{
    if (outputgradients == null || outputgradients.Dimension != NumberOfOutputs)
    {
        throw new ArgumentException("outputgradient may not be null and must have dimension equal to NumberOfNeurons.");
    }

    List<VectorBatch> inputGradientParts = _segment(outputgradients)
        .Select(part => Weights.LeftMultiplyBy(part))
        .ToList();
    return VectorBatch.Concatenate(inputGradientParts);
}
/// <summary>
/// Applies the neural function elementwise to a batch; with no function set,
/// the batch passes through unchanged. Caches input and output for backprop.
/// </summary>
public override VectorBatch Run(VectorBatch inputbatch)
{
    // Switch to batch mode: clear vector-mode state, record the batch input.
    _inputVector = null;
    _outputVector = null;
    _inputBatch = inputbatch;

    if (_neuralFunction == null)
    {
        // Identity activation.
        _outputBatch = inputbatch;
    }
    else
    {
        _outputBatch = new VectorBatch(inputbatch.AsMatrix().Map(x => _neuralFunction(x)));
    }

    return _outputBatch;
}
/// <summary>
/// Applies the neural function componentwise to a single vector; with no function
/// set, returns a copy of the input. Caches input and output for backprop.
/// </summary>
public override NetworkVector Run(NetworkVector inputvalues)
{
    // Switch to single-vector mode: clear batch-mode state, record the input.
    _inputBatch = null;
    _outputBatch = null;
    _inputVector = inputvalues;

    _outputVector = _neuralFunction == null
        ? inputvalues.Copy()
        : NetworkVector.ApplyFunctionComponentWise(inputvalues.Copy(), x => _neuralFunction(x));

    return _outputVector;
}
/// <summary>
/// Back-propagates the output gradients through the component chain, tail to head.
/// Trainable components accumulate their parameter gradients before the gradient
/// is transformed and pushed further back.
/// </summary>
public void BackPropagate(VectorBatch outputgradients)
{
    VectorBatch currentGradient = outputgradients;

    for (_networkComponentNode node = _tail; node != null; node = node.Previous)
    {
        NetComponent component = node.Component;
        if (node.IsTrainable)
        {
            (component as TrainableComponent).BackPropagate(currentGradient);
        }
        currentGradient = component.InputGradient(currentGradient);
    }
}
/// <summary>
/// Computes the network's input gradient by pulling the output gradient back
/// through every component, output side first.
/// </summary>
/// <param name="outputgradients">Non-null gradient batch matching the network's output dimension.</param>
/// <returns>The gradient with respect to the network's input.</returns>
/// <exception cref="InvalidOperationException">Thrown when the network has no components.</exception>
/// <exception cref="ArgumentException">Thrown on null or mis-dimensioned gradients.</exception>
public override VectorBatch InputGradient(VectorBatch outputgradients)
{
    if (NumberOfComponents == 0)
    {
        // FIX: corrected "propogate" typo in the original message.
        throw new InvalidOperationException("Attempt to back propagate in a network with no layers.");
    }

    // FIX: guard against null before dereferencing .Dimension (previously NullReferenceException).
    if (outputgradients == null)
    {
        throw new ArgumentException("outputgradients may not be null.");
    }

    if (outputgradients.Dimension != _tail.Component.NumberOfOutputs)
    {
        throw new ArgumentException(string.Format("The network has {0} outputs, but outputgradient has dimension {1}", _tail.Component.NumberOfOutputs, outputgradients.Dimension));
    }

    // Walk the components from tail to head, transforming the gradient at each step.
    VectorBatch gradient = outputgradients;
    foreach (NetComponent component in BackwardsEnumeration)
    {
        gradient = component.InputGradient(gradient);
    }
    return gradient;
}
/// <summary>
/// Runs the network forward: feeds the batch through every component in order.
/// </summary>
/// <param name="inputbatch">Non-null batch matching the network's input dimension.</param>
/// <returns>The network's output batch.</returns>
/// <exception cref="InvalidOperationException">Thrown when the network has no components.</exception>
/// <exception cref="ArgumentException">Thrown on null or mis-dimensioned input.</exception>
public override VectorBatch Run(VectorBatch inputbatch)
{
    if (NumberOfComponents == 0)
    {
        throw new InvalidOperationException("Attempt to run a network with no layers.");
    }

    // FIX: guard against null before dereferencing .Dimension (previously
    // NullReferenceException); consistent with the other Run overrides.
    if (inputbatch == null)
    {
        throw new ArgumentException("inputbatch may not be null.");
    }

    if (inputbatch.Dimension != NumberOfInputs)
    {
        throw new ArgumentException(string.Format("The network accepts {0} inputs, but input has dimension {1}", NumberOfInputs, inputbatch.Dimension));
    }

    // Feed the batch through each component, head to tail.
    VectorBatch result = inputbatch;
    foreach (NetComponent component in ForwardEnumeration)
    {
        result = component.Run(result);
    }
    return result;
}
/// <summary>
/// Row-wise softmax over the batch: each row is exponentiated (with the max-shift
/// trick for numerical stability) and normalized to sum to one.
/// </summary>
/// <param name="inputbatch">Non-null batch whose dimension equals the number of units.</param>
/// <returns>A new batch containing the row-wise softmax of the input.</returns>
/// <exception cref="ArgumentException">Thrown on null or mis-dimensioned input.</exception>
public override VectorBatch Run(VectorBatch inputbatch)
{
    if (inputbatch == null || inputbatch.Dimension != _numberOfUnits)
    {
        throw new ArgumentException("inputvalues may not be null and must have dimension equal to the number of units.");
    }

    Matrix<double> result = Matrix<double>.Build.DenseOfMatrix(inputbatch.AsMatrix());

    // BUG FIX: the original iterated EnumerateRows() and called Vector.Map, but
    // Map returns a NEW vector (and enumerated rows are copies), so nothing was
    // ever written back — the "softmax" returned the raw input. Rows must be
    // transformed in place and stored back with SetRow.
    for (int i = 0; i < result.RowCount; i++)
    {
        Vector<double> row = result.Row(i);
        double max = row.Max();
        row.MapInplace(x => Math.Exp(x - max));
        double sum = row.Sum();
        row.MapInplace(x => x / sum);
        result.SetRow(i, row);
    }

    return new VectorBatch(result);
}
/// <summary>
/// Forward pass for the segmented layer: each input segment is run through the
/// shared weights and biases, and the per-segment outputs are concatenated.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the input dimension does not match.</exception>
protected override VectorBatch _run(VectorBatch inputbatch)
{
    if (inputbatch.Dimension != NumberOfInputs)
    {
        throw new ArgumentException("Input dimension does not match this Layer.");
    }

    // Switch to batch mode: clear any stale single-vector input.
    VectorInput = null;
    BatchInput = inputbatch;

    List<VectorBatch> outputParts = new List<VectorBatch>();
    foreach (VectorBatch inputPart in _segment(inputbatch))
    {
        VectorBatch segmentOutput = Weights.TransposeAndLeftMultiplyBy(inputPart);
        segmentOutput.AddVectorToEachRow(Biases);
        outputParts.Add(segmentOutput);
    }
    return VectorBatch.Concatenate(outputParts);
}
/// <summary>
/// Gradient of the cost with respect to the weights for a batch, formed from the
/// output gradients and their corresponding inputs (delegated to VectorBatch.LeftMultiply).
/// </summary>
public WeightsMatrix WeightsGradient(VectorBatch outputgradients, VectorBatch inputs)
{
    return outputgradients.LeftMultiply(inputs);
}
/// <summary>
/// Multiplies the batch (rows as vectors) by the transpose of this matrix: batch * M^T.
/// </summary>
public VectorBatch TransposeAndLeftMultiplyBy(VectorBatch batch)
{
    var product = batch.AsMatrix().Multiply(_matrix.Transpose());
    return new VectorBatch(product);
}
/// <summary>
/// Multiplies the batch (rows as vectors) by this matrix: batch * M.
/// </summary>
public VectorBatch LeftMultiplyBy(VectorBatch batch)
{
    var product = batch.AsMatrix().Multiply(_matrix);
    return new VectorBatch(product);
}
/// <summary>
/// Gradient of the cost with respect to the biases: the output gradient is first
/// pulled back through the activation, then delegated to the combiner.
/// </summary>
public NetworkVector BiasesGradient(VectorBatch outputgradient)
{
    VectorBatch activationGradient = ActivationGradient(outputgradient);
    return _combiner.BiasesGradient(activationGradient);
}
/// <summary>
/// Gradient of the cost with respect to the weights: the output gradient is first
/// pulled back through the activation, then delegated to the combiner.
/// </summary>
public WeightsMatrix WeightsGradient(VectorBatch outputgradient, VectorBatch input)
{
    VectorBatch activationGradient = ActivationGradient(outputgradient);
    return _combiner.WeightsGradient(activationGradient, input);
}
/// <summary>
/// Computes the gradient of the cost with respect to this component's input,
/// given the gradient with respect to its output.
/// </summary>
/// <param name="outputgradient">Gradient with respect to the output batch.</param>
/// <returns>Gradient with respect to the input batch.</returns>
public abstract VectorBatch InputGradient(VectorBatch outputgradient);
/// <summary>
/// Input gradient for the layer: the output gradient is first pulled back through
/// the activation, then through the combiner.
/// </summary>
public override VectorBatch InputGradient(VectorBatch outputgradients)
{
    VectorBatch activationGradient = ActivationGradient(outputgradients);
    return _combiner.InputGradient(activationGradient);
}
/// <summary>
/// Runs this component forward on a batch of input vectors.
/// </summary>
/// <param name="inputbatch">The batch of input vectors.</param>
/// <returns>The batch of output vectors.</returns>
public abstract VectorBatch Run(VectorBatch inputbatch);
/// <summary>
/// Accumulates bias and weight gradients for one (output gradient, input) pair.
/// </summary>
public override void BackPropagate(VectorBatch outputgradient, VectorBatch input)
{
    NetworkVector biasesGradient = BiasesGradient(outputgradient);
    _biasesGradientAccumulator.Add(biasesGradient);

    WeightsMatrix weightsGradient = WeightsGradient(outputgradient, input);
    _weightsGradientAccumulator.Add(weightsGradient);
}
/// <summary>
/// Back-propagates against the most recently recorded batch input.
/// </summary>
public override void BackPropagate(VectorBatch outputgradient)
{
    VectorBatch lastInput = BatchInput;
    BackPropagate(outputgradient, lastInput);
}
/// <summary>
/// Splits a batch into _repetitions segments, one per repetition of the component.
/// </summary>
protected List <VectorBatch> _segment(VectorBatch batchToSegment)
{
    return batchToSegment.Segment(_repetitions);
}
/// <summary>
/// Segments both batches and zips corresponding segments into BatchPairs.
/// </summary>
protected IEnumerable <BatchPair> _segmentAndPair(VectorBatch first, VectorBatch second)
{
    List<VectorBatch> firstSegments = _segment(first);
    List<VectorBatch> secondSegments = _segment(second);
    return firstSegments.Zip(secondSegments, (a, b) => new BatchPair(a, b));
}