protected override VectorBatch _backPropagate(VectorBatch outputGradient)
{
    // Backward pass for a logistic-sigmoid layer: the derivative is
    // f'(z) = f(z) * (1 - f(z)), expressed here via the cached activation
    // _output, then chained with the incoming gradient componentwise.
    // NOTE(review): assumes _run was called first so _output is set — confirm base class guarantees ordering.
    if (outputGradient == null || outputGradient.Dimension != NumberOfOutputs)
    {
        throw new ArgumentException("outputgradient may not be null and must have dimension equal to the number of units.");
    }

    return VectorBatch.ApplyFunction(
        (activation, incomingGrad) => activation * (1 - activation) * incomingGrad,
        _output,
        outputGradient);
}
protected override VectorBatch _run(VectorBatch inputbatch)
{
    // Forward pass: apply the configured activation function componentwise.
    // When no activation is configured (_neuralFunction == null) the layer
    // acts as the identity and passes the batch through unchanged.
    //
    // Fix: validate the input like the sibling _run overrides do; previously
    // a null batch would only fail later inside ApplyFunction with a
    // NullReferenceException instead of a clear ArgumentException.
    if (inputbatch == null || inputbatch.Dimension != NumberOfInputs)
    {
        throw new ArgumentException("input may not be null and must have dimension equal to the number of units.");
    }

    _input = inputbatch;

    _output = _neuralFunction != null
        ? VectorBatch.ApplyFunction(x => _neuralFunction(x), _input)
        : inputbatch;

    return _output;
}
protected override VectorBatch _run(VectorBatch inputbatch)
{
    // Softmax forward pass. Each vector's maximum is subtracted before
    // exponentiation so Math.Exp cannot overflow; the shift cancels out
    // when the components are normalized by their sum, so the result is
    // mathematically unchanged.
    if (inputbatch == null || inputbatch.Dimension != NumberOfInputs)
    {
        throw new ArgumentException("input may not be null and must have dimension equal to the number of units.");
    }

    _input = inputbatch;

    VectorBatch shifted = inputbatch.SubractVectorMaxima();
    VectorBatch exponentiated = VectorBatch.ApplyFunction(x => Math.Exp(x), shifted);
    _output = exponentiated.DivideByComponentSums();

    return _output;
}
protected override VectorBatch _backPropagate(VectorBatch outputGradient)
{
    // Chain rule for a generic activation layer: evaluate the activation's
    // derivative from the cached pre-activation (_input) and activation
    // (_output), then multiply componentwise by the incoming gradient.
    // When no derivative is configured the layer is treated as the identity
    // and the gradient passes through unchanged.
    //
    // Fix: the exception message previously said "equal to NumberOfNeurons"
    // while the guard actually checks NumberOfOutputs; reworded to match
    // the condition and the sibling _backPropagate's phrasing.
    if (outputGradient == null || outputGradient.Dimension != NumberOfOutputs)
    {
        throw new ArgumentException("outputgradient may not be null and must have dimension equal to the number of units.");
    }

    if (_neuralFunctionDerivative == null)
    {
        return outputGradient;
    }

    VectorBatch derivative = VectorBatch.ApplyFunction((x, y) => _neuralFunctionDerivative(x, y), _input, _output);
    return VectorBatch.ApplyFunction((d, g) => d * g, derivative, outputGradient);
}