public void RMSPropValuesAreEqual(float learningRate, float decayRate, float weightDecay, float momentum)
{
    var local = new NeuroWeight<float>(MatrixFactory.RandomMatrix<float>(10, 10, 1e-2f));
    var remote = local.Clone();

    for (int i = 0; i < 100; i++)
    {
        var grad = MatrixFactory.RandomMatrix<float>(10, 10, 1.0f);
        grad.CopyTo(local.Gradient);
        grad.CopyTo(remote.Gradient);

        MathProvider.GravesRmsPropUpdate(weightDecay, learningRate, decayRate, momentum, local);

        using (var ptrs = new MatrixPointersBag<float>(true, remote.Weight, remote.Gradient, remote.Cache1, remote.Cache2, remote.CacheM))
        {
            Interface.TestRMSPropUpdate(ptrs.Definitions[0], ptrs.Definitions[1], ptrs.Definitions[2], ptrs.Definitions[3], ptrs.Definitions[4],
                learningRate, decayRate, momentum, weightDecay);
        }

        local.Weight.ShouldMatrixEqualWithinError(remote.Weight);
        local.Cache1.ShouldMatrixEqualWithinError(remote.Cache1);
        local.Cache2.ShouldMatrixEqualWithinError(remote.Cache2);
        local.CacheM.ShouldMatrixEqualWithinError(remote.CacheM);
        local.Gradient.ShouldMatrixEqualWithinError(remote.Gradient);
    }
}
protected override ConfusionMatrix TestInternal(NeuralNet<T> network, IDataSet<T> testSet)
{
    var errors = new List<double>();

    int sampleCount = testSet.SampleCount;
    int classCount = testSet.TargetSize;
    int batchSize = testSet.BatchSize;

    ConfusionMatrix confusionMatrix;
    if (_classNames?.Count > 0)
    {
        if (classCount != _classNames?.Count)
        {
            throw new InvalidOperationException("Class names count isn't equal to test set class count!");
        }
        confusionMatrix = new ConfusionMatrix(_classNames);
    }
    else
    {
        confusionMatrix = new ConfusionMatrix(classCount);
    }

    for (int i = 0; i < sampleCount; i++)
    {
        var sample = testSet.GetNextSample();
        var target = sample.Target;
        var stepResult = network.Step(sample.Input);
        var predicted = MathProvider.SoftMaxChoice(stepResult);

        for (int colIdx = 0; colIdx < batchSize; colIdx++)
        {
            int targetClass = -1; // reset for every sample in the batch
            for (int classIdx = 0; classIdx < classCount; classIdx++)
            {
                // TODO: Hacky conversion (assumes T unboxes to int)
                if ((int)(object)target[classIdx, colIdx] == 1)
                {
                    targetClass = classIdx;
                    break;
                }
            }
            if (targetClass < 0)
            {
                throw new InvalidOperationException("Target vector doesn't contain a positive result!");
            }
            confusionMatrix.Prediction(targetClass, predicted[colIdx]);
        }

        errors.Add(MathProvider.CrossEntropyError(MathProvider.SoftMaxNorm(stepResult), sample.Target));
    }

    confusionMatrix.CalculateResult(sampleCount * batchSize);
    confusionMatrix.Error = errors.Sum() / errors.Count;
    return confusionMatrix;
}
public override void ClampGrads(float limit)
{
    T min = MathProvider.Scalar(-limit);
    T max = MathProvider.Scalar(limit);

    _bias.Gradient.Clamp(min, max);
    _weights.Gradient.Clamp(min, max);
}
public override Matrix<T> Step(Matrix<T> input, bool inTraining = false)
{
    var output = MathProvider.SoftMaxNorm(input);
    if (inTraining)
    {
        Inputs.Add(input);
        Outputs.Add(output);
    }
    return output;
}
public override Matrix<T> Step(Matrix<T> input, bool inTraining = false)
{
    var output = input.CloneMatrix();
    MathProvider.ApplySigmoid(output);
    if (inTraining)
    {
        Outputs.Add(output);
    }
    return output;
}
public void CanClampMatrix()
{
    var local = MatrixFactory.ParseString<float>(@"2 5
-6 1");
    var remote = local.Clone();

    local.Clamp(-4.0f, 4.0f);
    local.AsColumnMajorArray().ShouldArrayEqualWithinError(MathProvider.Array(2.0f, -4.0f, 4.0f, 1.0f));

    using (var ptrs = new MatrixPointersBag<float>(true, remote))
    {
        Interface.TestClampMatrix(ptrs.Definitions[0], 4.0f);
        remote.AsColumnMajorArray().ShouldArrayEqualWithinError(MathProvider.Array(2.0f, -4.0f, 4.0f, 1.0f));
    }
}
public void CrossEntropyBackpropagationsAreEqual()
{
    var m1 = MatrixFactory.RandomMatrix<float>(100, 100, 1e-5f, 1.0f);
    var m2 = MatrixFactory.RandomMatrix<float>(100, 100, 1e-5f, 1.0f);
    var remoteResult = MatrixFactory.Create<float>(100, 100);

    Matrix<float> local;
    using (var matrixPtrs = new MatrixPointersBag<float>(true, m1.CloneMatrix(), m2.CloneMatrix(), remoteResult))
    {
        local = MathProvider.BackPropagateCrossEntropyError(m1, m2);
        Interface.TestCrossEntropyBackprop(matrixPtrs.Definitions[0], matrixPtrs.Definitions[1], matrixPtrs.Definitions[2]);
    }

    remoteResult.ShouldMatrixEqualWithinError(local);
}
public void CrossEntropyErrorsAreEqual()
{
    var m1 = MatrixFactory.RandomMatrix<float>(100, 100, 1e-5f, 1.0f);
    var m2 = MatrixFactory.RandomMatrix<float>(100, 100, 1e-5f, 1.0f);

    using (var matrixPtrs = new MatrixPointersBag<float>(true, m1.CloneMatrix(), m2.CloneMatrix()))
    {
        double local = MathProvider.CrossEntropyError(m1, m2);
        double remote = Interface.TestCrossEntropyError(matrixPtrs.Definitions[0], matrixPtrs.Definitions[1]);

        double.IsNaN(local).ShouldBeFalse();
        double.IsNaN(remote).ShouldBeFalse();

        remote.ShouldEqualWithinError(local);
    }
}
/// <summary>
/// TODO: based on http://www.remondo.net/calculate-mean-median-mode-averages-csharp/
/// </summary>
/// <param name="list">The values to take the median of.</param>
/// <param name="midIndex">The index of the middle element in the sorted list.</param>
/// <returns>The median value.</returns>
public static T Median<T>(IEnumerable<T> list, out int midIndex)
{
    List<T> orderedList = list.OrderBy(numbers => numbers).ToList();

    if (orderedList.Count == 0)
    {
        throw new Exception("No median calculation possible for empty lists.");
    }
    if (orderedList.Count == 1)
    {
        midIndex = 0;
        return orderedList.First();
    }

    MathProvider<T> mathP = null;
    if (typeof(T) == typeof(double))
    {
        mathP = new DoubleMathProvider() as MathProvider<T>;
    }
    else if (typeof(T) == typeof(long))
    {
        mathP = new LongMathProvider() as MathProvider<T>;
    }
    if (mathP == null)
    {
        throw new Exception("Type not supported: " + typeof(T));
    }

    int listSize = orderedList.Count;
    T result;
    midIndex = listSize / 2;

    if (listSize % 2 == 0) // even: average the two middle elements
    {
        result = mathP.Half(mathP.Add(orderedList.ElementAt(midIndex - 1), orderedList.ElementAt(midIndex)));
    }
    else // odd: take the middle element
    {
        result = orderedList.ElementAt(midIndex);
    }

    return result;
}
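A brief usage sketch for the Median helper above (the containing Statistics class name is assumed here purely for illustration): with an even number of elements, the two middle values of the sorted list are averaged through the MathProvider's Add and Half.

int midIndex;
long[] values = { 9, 1, 7, 3 };

// Sorted order is 1, 3, 7, 9 -> even count, so 3 and 7 are averaged.
long median = Statistics.Median(values, out midIndex);
// median == 5, midIndex == 2 (7 sits at index 2 of the sorted list)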
public Matrix(int mSize, int nSize)
{
    _array = new T[mSize, nSize];

    if (typeof(T) == typeof(double))
    {
        _math = new DoubleMathProvider() as MathProvider<T>;
    }
    else if (typeof(T) == typeof(int))
    {
        _math = new IntMathProvider() as MathProvider<T>;
    }

    if (_math == null)
    {
        throw new InvalidOperationException(
            "Type " + typeof(T).ToString() + " is not supported by Matrix.");
    }
}
public Polynomial(T A, T B = default(T), T C = default(T))
{
    this.A = A;
    this.B = B;
    this.C = C;

    if (typeof(T) == typeof(double))
    {
        _mathProvider = new DoubleMathProvider() as MathProvider<T>;
    }
    else if (typeof(T) == typeof(int))
    {
        _mathProvider = new IntMathProvider() as MathProvider<T>;
    }

    if (_mathProvider == null)
    {
        throw new InvalidOperationException(
            "Type " + typeof(T).ToString() + " is not supported by Polynomial.");
    }
}
// Notice this is a type (static) constructor. It runs the first time
// Fraction<T> is used with a particular T.
// Having _math static reduces overhead.
static Fraction()
{
    // This part of the code might be cleaner if it used reflection once
    // to find all implementors of MathProvider and assign the instance
    // whose type parameter matches T.
    if (typeof(T) == typeof(double))
    {
        _math = new DoubleMathProvider() as MathProvider<T>;
    }
    else if (typeof(T) == typeof(int))
    {
        _math = new IntMathProvider() as MathProvider<T>;
    }
    // ... assign other options here.

    if (_math == null)
    {
        throw new InvalidOperationException(
            "Type " + typeof(T).ToString() + " is not supported by Fraction.");
    }
}
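The reflection-based alternative mentioned in the comment above could look roughly like the sketch below. It assumes every concrete provider derives from MathProvider<T> for exactly one T and has a parameterless constructor; the ResolveProvider helper and its class name are illustrative, not part of the original code.

using System;
using System.Linq;
using System.Reflection;

static class MathProviderResolver
{
    // Scans the current assembly for a non-abstract type deriving from MathProvider<T>
    // and instantiates it; returns null when no provider matches T.
    public static MathProvider<T> ResolveProvider<T>()
    {
        Type wanted = typeof(MathProvider<T>);
        Type providerType = Assembly.GetExecutingAssembly()
            .GetTypes()
            .FirstOrDefault(t => !t.IsAbstract && wanted.IsAssignableFrom(t));

        return providerType == null ? null : (MathProvider<T>)Activator.CreateInstance(providerType);
    }
}

// With such a helper, the type constructor above could reduce to:
//   static Fraction() => _math = MathProviderResolver.ResolveProvider<T>()
//       ?? throw new InvalidOperationException("Type " + typeof(T) + " is not supported by Fraction.");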
public override void ClampGrads(float limit)
{
    T min = MathProvider.Scalar(-limit);
    T max = MathProvider.Scalar(limit);

    _whr.Gradient.Clamp(min, max);
    _whz.Gradient.Clamp(min, max);
    _whh.Gradient.Clamp(min, max);

    _wxr.Gradient.Clamp(min, max);
    _wxz.Gradient.Clamp(min, max);
    _wxh.Gradient.Clamp(min, max);

    _bxr.Gradient.Clamp(min, max);
    _bxz.Gradient.Clamp(min, max);
    _bxh.Gradient.Clamp(min, max);

    _bhr.Gradient.Clamp(min, max);
    _bhz.Gradient.Clamp(min, max);
    _bhh.Gradient.Clamp(min, max);
}
public override double GetError(Matrix<T> output, Matrix<T> target)
{
    return MathProvider.CrossEntropyError(output, target);
}
protected override void Initialize()
{
    _scale = Matrix<T>.Build.Dense(_size, BatchSize, MathProvider.Scalar(_dropout));
}
public HomeController(MathProvider mathProvider)
{
    this.mathProvider = mathProvider ?? throw new System.ArgumentNullException(nameof(mathProvider));
}
public override void Optimize(NeuroWeight<T> weight)
{
    MathProvider.GravesRmsPropUpdate(_weightDecay, LearningRate, _decayRate, _momentum, weight);
}
public override double GetError(Matrix<T> output, Matrix<T> target)
{
    return MathProvider.MeanSquareError(output, target);
}
public override void Optimize(NeuroWeight<T> weight)
{
    MathProvider.AdagradUpdate(MathProvider.Scalar(LearningRate), weight);
}
public override Matrix<T> Step(Matrix<T> input, bool inTraining = false)
{
    if (input.RowCount != _wxh.Weight.ColumnCount)
    {
        throw new Exception($"Wrong input matrix row size provided!\nExpected: {_wxh.Weight.ColumnCount}, got: {input.RowCount}");
    }
    if (input.ColumnCount != BatchSize)
    {
        throw new Exception($"Wrong input batch size!\nExpected: {BatchSize}, got: {input.ColumnCount}");
    }

    // var z = Bz + Wxz*input + Whz*lastH;
    var z = _bxz.Weight.TileColumns(BatchSize) + _bhz.Weight.TileColumns(BatchSize);
    z.Accumulate(_wxz.Weight, input);
    z.Accumulate(_whz.Weight, _lastH);

    // var r = Br + Wxr*input + Whr*lastH;
    var r = _bxr.Weight.TileColumns(BatchSize) + _bhr.Weight.TileColumns(BatchSize);
    r.Accumulate(_wxr.Weight, input);
    r.Accumulate(_whr.Weight, _lastH);

    // Apply sigmoid to both gates in one pass.
    MathProvider.ApplySigmoid2(r, z);

    // Candidate state: hNew = tanh(Bxh + Wxh*input + r .* (Bhh + Whh*lastH))
    var hNew = _bxh.Weight.TileColumns(BatchSize);
    hNew.Accumulate(_wxh.Weight, input);

    var hProp = _bhh.Weight.TileColumns(BatchSize);
    hProp.Accumulate(_whh.Weight, _lastH);

    hNew = hNew + r.PointwiseMultiply(hProp);
    MathProvider.ApplyTanh(hNew);

    // var H = (z ^ hNew) + ((_hiddenOnes - z) ^ _lastH);
    var H = Matrix<T>.Build.Dense(hNew.RowCount, hNew.ColumnCount);
    MathProvider.CalculateH(H, hNew, z, _lastH);

    if (inTraining)
    {
        Outputs.Add(H);
        _hPropVals.Add(hProp);
        _hNewVals.Add(hNew);
        _zVals.Add(z);
        _rVals.Add(r);
        Inputs.Add(input);
    }

    _lastH = H;
    return H;
}
public override List<Matrix<T>> BackpropagateError(List<Matrix<T>> outputs, List<Matrix<T>> targets)
{
    return MathProvider.BackPropagateError(outputs, targets, MathProvider.BackPropagateMeanSquareError);
}
public override void Optimize(NeuroWeight<T> weight)
{
    weight.Timestep++;
    MathProvider.AdamUpdate(LearningRate, _b1, _b2, weight);
}