/// <summary>
/// Batch gradient descent for the polynomial model: accumulates the gradient of the
/// squared-error loss over the whole batch, then applies one update per weight.
/// </summary>
/// <param name="inputData">Training inputs; each is converted to double and raised to powers 0..Weights.Count-1.</param>
/// <param name="knownOutputs">Target outputs, one per input.</param>
void BatchGD(T[] inputData, T[] knownOutputs)
{
    // Summed gradient per weight for the whole batch. The original accumulated into a
    // shared List<double> with unsynchronized `wts[j] +=` from parallel iterations — a
    // data race that silently drops contributions. Each task now fills a private buffer
    // which is merged under a lock in the localFinally delegate.
    double[] gradient = new double[Weights.Count];
    object gate = new object();

    Parallel.For(0, inputData.Length,
        () => new double[Weights.Count],           // per-task partial-gradient buffer
        (i, state, local) =>
        {
            // Weights are not modified until after the loop, so Predict is safe to
            // call concurrently here.
            double error = (dynamic)knownOutputs[i] - Predict(inputData[i]);
            double x = (double)(dynamic)inputData[i];

            for (int j = 0; j < local.Length; j++)
            {
                // Contribution of sample i to dLoss/dw_j: error * x^j.
                local[j] += error * System.Math.Pow(x, j);
            }

            if (OnTraining != null)
            {
                TrainingResponse res = new TrainingResponse();
                // Per-sample squared-error loss, same expression as the original.
                res.Loss = 0.5f * System.Math.Pow((dynamic)error, 2);
                OnTraining?.Invoke(this, res);
            }
            return local;
        },
        (local) =>
        {
            // Merge this task's partials into the shared accumulator.
            lock (gate)
            {
                for (int j = 0; j < gradient.Length; j++)
                {
                    gradient[j] += local[j];
                }
            }
        });

    // Apply the learning rate exactly once. The original multiplied by LearningRate
    // both when accumulating and when applying, i.e. an effective step of LearningRate^2.
    for (int j = 0; j < Weights.Count; j++)
    {
        Weights[j] += gradient[j] * LearningRate;
    }
    //Slope = Slope + delta_m * LearningRate;
    //Bias = Bias + delta_c * LearningRate;
}
/// <summary>
/// Stochastic gradient descent for the slope/bias (linear) model: updates
/// <c>Slope</c> and <c>Bias</c> after every individual sample.
/// </summary>
/// <param name="inputData">Training inputs.</param>
/// <param name="knownOutputs">Target outputs, one per input.</param>
void SGD(T[] inputData, T[] knownOutputs)
{
    // SGD is inherently sequential: each sample's update must be applied before the
    // next sample is evaluated, because Predict reads Slope/Bias. The original ran the
    // loop with Parallel.For, which (a) raced on `Slope +=` / `Bias +=` (a non-atomic
    // read-modify-write) and (b) let Predict observe torn/stale parameters.
    for (int i = 0; i < inputData.Length; i++)
    {
        // Prediction error at the current parameters; compute it once per sample
        // (the original evaluated the same subtraction twice).
        double error = (dynamic)knownOutputs[i] - Predict(inputData[i]);
        double delta_m = (dynamic)inputData[i] * error * DSigmoid((inputData[i]));
        double delta_c = error;

        Slope += delta_m * LearningRate;
        Bias += delta_c * LearningRate;

        if (OnTraining != null)
        {
            TrainingResponse res = new TrainingResponse();
            // Loss is recomputed after the update (fresh Predict), matching the
            // original's observable behavior.
            res.Loss = 0.5f * System.Math.Pow((dynamic)knownOutputs[i] - Predict(inputData[i]), 2);
            OnTraining?.Invoke(this, res);
        }
    }
}
/// <summary>
/// Stochastic gradient descent for the polynomial model: after each sample, updates
/// every weight j by <c>error * x^j * LearningRate</c>.
/// </summary>
/// <param name="inputData">Training inputs; each is converted to double and raised to powers 0..Weights.Count-1.</param>
/// <param name="knownOutputs">Target outputs, one per input.</param>
void SGD(T[] inputData, T[] knownOutputs)
{
    // The original used nested Parallel.For loops: the inner one mutated Weights[j]
    // while simultaneously re-evaluating Predict(inputData[i]) — which reads Weights —
    // for every j. That is a data race and also makes the "gradient" depend on the
    // order in which weights happen to be updated. Run sequentially and evaluate the
    // error once, at the sample's current parameters, before updating any weight.
    for (int i = 0; i < inputData.Length; i++)
    {
        double error = (dynamic)knownOutputs[i] - Predict(inputData[i]);
        double x = (double)(dynamic)inputData[i];

        for (int j = 0; j < Weights.Count; j++)
        {
            // Sample i's contribution to dLoss/dw_j: error * x^j.
            Weights[j] += error * System.Math.Pow(x, j) * LearningRate;
        }

        if (OnTraining != null)
        {
            TrainingResponse res = new TrainingResponse();
            // Loss recomputed with a fresh Predict (post-update), as in the original.
            res.Loss = 0.5f * System.Math.Pow((dynamic)knownOutputs[i] - Predict(inputData[i]), 2);
            OnTraining?.Invoke(this, res);
        }
    }
}