/// <summary>
/// Creates a new array which holds the bias and weight values.
/// </summary>
/// <returns>The new array which holds the bias and weight values.</returns>
public Single1D GetCoefficients()
{
    // Allocate a correctly-sized array, then let the indexed overload fill it from position 0.
    Single1D result = CreateCoefficients();
    GetCoefficients(result, 0);
    return result;
}
/// <summary>
/// Draws the input activations as a grid of nu x nv gray-scale cells with a black border.
/// </summary>
/// <param name="graphics">The drawing surface.</param>
/// <param name="xs">The input activation values, laid out as xs[i + nu * j].</param>
/// <param name="x">The left edge of the drawing area.</param>
/// <param name="y">The top edge of the drawing area.</param>
/// <param name="w">The width of the drawing area.</param>
/// <param name="h">The height of the drawing area.</param>
/// <param name="nu">The number of cells in the horizontal direction.</param>
/// <param name="nv">The number of cells in the vertical direction.</param>
private static void DrawInputs(Graphics graphics, Single1D xs, float x, float y, float w, float h, int nu, int nv)
{
    // FIX: removed an unused StringFormat that was created and disposed but never drawn with.
    using (Pen pen = new Pen(Color.Black, 1f))
    using (SolidBrush fillBrush = new SolidBrush(Color.Black))
    {
        // Square cell size: the grid must fit in both directions.
        float d1 = Min(w / nu, h / nv);
        for (int i = 0; i < nu; i++)
        {
            float x2 = i * d1;
            for (int j = 0; j < nv; j++)
            {
                float y2 = j * d1;
                float z = xs[i + nu * j];
                // Map the activation to a gray level between black (low) and white (high).
                fillBrush.Color = ColorUtilities.GetColor(z, Color.Black, Color.White);
                graphics.FillRectangle(fillBrush, x + x2, y + y2, d1, d1);
            }
        }
        graphics.DrawRectangle(pen, x, y, w, h);
    }
}
/// <summary>
/// Evaluates the selected cost function for the measured and required activations.
/// </summary>
/// <param name="aa">The measured activation values.</param>
/// <param name="yy">The required activation values.</param>
/// <param name="costFunction">Selects which cost function to evaluate.</param>
/// <returns>The cost value.</returns>
/// <exception cref="InvalidCaseException">Thrown for an unknown cost function value.</exception>
private static float CostFunction(Single1D aa, Single1D yy, CostFunctionEnum costFunction)
{
    // FIX: the method's closing brace was missing in the original text.
    return costFunction switch
    {
        CostFunctionEnum.Quadratic => QuadraticCostFunction(aa, yy),
        CostFunctionEnum.CrossEntropy => CrossEntropyCostFunction(aa, yy),
        _ => throw new InvalidCaseException(nameof(costFunction), costFunction, 386203),
    };
}
/// <summary>
/// Replaces the current point with a new one whose coordinates are drawn
/// uniformly from [-1, 1), and logs it to the console.
/// </summary>
private void RandomizePoint()
{
    int count = multifunc.N;
    _point = new Single1D(count);
    for (int index = 0; index < count; index++)
    {
        // Scale NextDouble's [0, 1) range to [-1, 1).
        _point[index] = -1f + 2f * (float)_random.NextDouble();
    }
    Console.WriteLine($"new point: {Minimization.ArrayToString(_point)}");
    Console.WriteLine();
}
/// <summary>
/// Draws the output layer as vertical bars: gray filled bars for the measured
/// activations, dark-red outlines for the required activations, and an index
/// label below each bar.
/// </summary>
/// <param name="graphics">The drawing surface.</param>
/// <param name="requirement">The required output values. NOTE(review): dereferenced via
/// <c>requirement.Count</c> before the later null check, so it must not be null — confirm
/// whether the null check inside the loop is still needed.</param>
/// <param name="measurement">The measured output values, or null to skip the bars.</param>
/// <param name="x">The left edge of the drawing area.</param>
/// <param name="y">The top edge of the drawing area.</param>
/// <param name="w">The width of the drawing area.</param>
/// <param name="h">The height of the drawing area.</param>
private static void DrawOutputs(Graphics graphics, Single1D requirement, Single1D measurement, float x, float y, float w, float h)
{
    const float sh = 14f;      // vertical space reserved for the index labels
    float hb = h - sh;         // height available for the bars
    int ny = requirement.Count;
    float d2 = w / ny;         // horizontal slot per output value
    float xgap = 1f;
    float dx = d2 - 2 * xgap;  // bar width inside a slot
    using (Pen pen_y = new Pen(Color.DarkRed, 1f)) // required y values
    using (SolidBrush brush_m = new SolidBrush(Color.Gray)) // measured y values
    using (SolidBrush stringBrush = new SolidBrush(Color.Black))
    using (StringFormat stringFormat = new StringFormat()
    {
        LineAlignment = StringAlignment.Near,
        Alignment = StringAlignment.Near,
    })
    {
        // FIX: removed a dead loop that computed the index/value of the maximum
        // requirement (iMax/vMax) — its results were never used.
        for (int i = 0; i < ny; i++)
        {
            float xi = x + xgap + i * d2;
            if (measurement != null)
            {
                float zz = measurement[i];
                float z1 = y + hb * (1f - zz);
                graphics.FillRectangle(brush_m, xi, z1, dx, hb * zz);
            }
            if (requirement != null)
            {
                float yy = requirement[i];
                float y1 = y + hb * (1f - yy);
                // Outline of the required-value bar: top edge plus both sides.
                graphics.DrawLine(pen_y, xi, y1, xi + dx, y1);
                graphics.DrawLine(pen_y, xi, y1, xi, y + hb);
                graphics.DrawLine(pen_y, xi + dx, y1, xi + dx, y + hb);
            }
            graphics.DrawString(i.ToString(), DefaultFont, stringBrush, xi - 2f, y + hb, stringFormat);
        }
    }
}
/// <summary>
/// Computes the quadratic cost: the sum of squared differences between the
/// measured and required values, divided by twice the number of values.
/// </summary>
/// <param name="aa">The measured activation values.</param>
/// <param name="yy">The required activation values.</param>
/// <returns>The quadratic cost value.</returns>
/// <exception cref="UnequalValueException">Thrown when the arrays differ in length.</exception>
private static float QuadraticCostFunction(Single1D aa, Single1D yy)
{
    int count = aa.Count;
    if (count != yy.Count)
    {
        throw new UnequalValueException(count, yy.Count, 109047);
    }
    float sum = 0f;
    for (int k = 0; k < count; k++)
    {
        sum += Sqr(aa[k] - yy[k]);
    }
    return sum / (2f * count);
}
/// <summary>
/// Computes the cross-entropy cost of the measured activations against the
/// required values, averaged over the number of values. Accumulates in double
/// precision for numerical stability.
/// </summary>
/// <param name="aa">The measured activation values (expected in (0, 1)).</param>
/// <param name="yy">The required activation values.</param>
/// <returns>The cross-entropy cost value.</returns>
/// <exception cref="UnequalValueException">Thrown when the arrays differ in length.</exception>
private static float CrossEntropyCostFunction(Single1D aa, Single1D yy)
{
    int count = aa.Count;
    if (count != yy.Count)
    {
        throw new UnequalValueException(count, yy.Count, 109047);
    }
    double sum = 0.0;
    for (int k = 0; k < count; k++)
    {
        double a = aa[k];
        double y = yy[k];
        sum += y * Log(a) + (1 - y) * Log(1 - a);
    }
    return -(float)sum / count;
}
/// <summary>
/// Minimizes the multi-dimensional function starting from the current point
/// using the conjugate gradient method.
/// </summary>
private void ConjugateGradient()
{
    Minimization minimization = new Minimization()
    {
        MaxIter = maxIter,
    };
    int n = multifunc.N;
    Single1D p = new Single1D(n);
    Single1D df = new Single1D(n);
    // Work on a copy so the starting point itself is not modified.
    for (int i = 0; i < n; i++)
    {
        p[i] = _point[i];
    }
    // FIX: the returned final cost was stored in an unused local; discard it
    // instead, consistent with SteepestDescent.
    minimization.ConjugateGradient(p, df, (iter) => multifunc.Calculate(p, df), alpha);
    Console.WriteLine();
}
/// <summary>
/// Minimizes the multi-dimensional function starting from the current point
/// using plain gradient descent.
/// </summary>
private void SteepestDescent()
{
    int n = multifunc.N;
    Single1D position = new Single1D(n);
    Single1D gradient = new Single1D(n);
    // Work on a copy so the starting point itself is not modified.
    for (int i = 0; i < n; i++)
    {
        position[i] = _point[i];
    }
    Minimization minimization = new Minimization()
    {
        MaxIter = maxIter,
    };
    minimization.GradientDescent(position, gradient, (iter) => multifunc.Calculate(position, gradient), alpha);
    Console.WriteLine();
}
// ----------------------------------------------------------------------------------------
#region Network

/// <summary>
/// Computes the average regularized cost over the given samples and accumulates
/// the averaged derivatives of the cost with respect to every coefficient.
/// </summary>
/// <param name="samples">The training samples (inputs and required outputs).</param>
/// <param name="derivatives">Receives the averaged derivatives; cleared first.</param>
/// <param name="measurements">Receives the measured output activations per sample.</param>
/// <param name="arguments">Carries settings, cancellation and progress reporting.</param>
/// <returns>The average (regularized) cost over all samples.</returns>
public float GetCostAndDerivatives(
    SampleList samples, Single1D derivatives, MeasurementList measurements, CalculationArguments arguments)
{
    CostFunctionEnum costFunction = arguments.settings.CostFunction;
    float lambda = arguments.settings.Lambda;
    int nSamples = samples.Count;
    int nCoeffs = derivatives.Count;
    float cost = 0f;
    Layer last = Last;
    // FIX: CountWeight() depends only on the network structure, which does not
    // change during this calculation, so it is hoisted out of the sample loop.
    int weightCount = CountWeight();
    for (int i = 0; i < nCoeffs; i++)
    {
        derivatives[i] = 0f;
    }
    for (int iSample = 0; iSample < nSamples; iSample++)
    {
        arguments.ThrowIfCancellationRequested();
        Sample sample = samples[iSample];
        Single1D measurement = measurements[iSample];
        Input.SetActivations(sample.Inputs, 0);
        FeedForward(true);
        last.GetActivations(measurement, 0);
        cost += CostFunction(measurement, sample.Requirements, costFunction);
        cost += 0.5f * lambda * SumWeightSqr() / weightCount; // regularization
        last.CalculateDeltas(sample.Requirements, costFunction);
        FeedBackward(true);
        AddDerivatives(derivatives, 0, lambda / weightCount);
        arguments.reporter?.ReportProgress(iSample, nSamples);
    }
    arguments.reporter?.ReportProgress(0, nSamples);
    // Average the accumulated cost and derivatives over the samples.
    cost /= nSamples;
    for (int i = 0; i < nCoeffs; i++)
    {
        derivatives[i] /= nSamples;
    }
    return cost;
}
/// <summary>
/// Trains the network on the given samples using momentum-based gradient
/// descent, reporting coefficients, cost and derivatives after each iteration.
/// </summary>
/// <param name="samples">The training samples (inputs with required outputs).</param>
/// <param name="arguments">Carries settings, cancellation and progress reporting.</param>
public void Learn(SampleList samples, CalculationArguments arguments)
{
    arguments.reporter?.WriteStart($"Learning the network using a subset of {samples.Count} random samples...");
    Stopwatch stopwatch = Stopwatch.StartNew();
    int sampleCount = samples.Count;
    int coefficientCount = CoefficientCount();
    // Current biasses and weights of the neurons in this network:
    Single1D coefficients = new Single1D(coefficientCount);
    // The derivatives of the cost with respect to the biasses and weights:
    Single1D derivatives = new Single1D(coefficientCount);
    // Momentum terms, one per coefficient, starting at zero:
    Single1D velocities = new Single1D(coefficientCount);
    velocities.Clear();
    MeasurementList measurements = new MeasurementList(sampleCount, Last.Count);
    GetCoefficients(coefficients, 0);
    Minimization minimization = new Minimization()
    {
        MaxIter = arguments.settings.MaxIter,
        Eps = arguments.settings.Epsilon,
        Tol = arguments.settings.Tolerance,
    };
    float finalCost = minimization.MomentumBasedGradientDescent(
        coefficients, derivatives, velocities,
        (iter) =>
        {
            // Push the candidate coefficients into the network, then measure.
            SetCoefficients(coefficients, 0);
            arguments.reporter?.ReportCoefficients(coefficients);
            float cost = GetCostAndDerivatives(samples, derivatives, measurements, arguments);
            arguments.reporter?.ReportCostAndDerivatives(cost, derivatives, measurements);
            return cost;
        },
        arguments.settings.LearningRate, arguments.settings.MomentumCoefficient);
    arguments.reporter?.WriteEnd($"The network has learned in {stopwatch.Elapsed.TotalSeconds} s, and the final cost value is {finalCost:F4}.");
}
/// <summary>
/// Creates a report payload carrying the current network coefficients.
/// </summary>
/// <param name="coefficients">The bias and weight values to report.</param>
public CoefficientsReportData(Single1D coefficients) => Coefficients = coefficients;
/// <summary>
/// Reads the activation values of every child layer, in order, into the array.
/// </summary>
/// <param name="activations">The array receiving the activation values.</param>
/// <param name="index">The start index of the array.</param>
/// <returns>The updated start index after the last layer.</returns>
public override int GetActivations(Single1D activations, int index)
{
    // Thread the running index through each layer via a local captured by the lambda.
    int next = index;
    ForEach(layer => next = layer.GetActivations(activations, next));
    return next;
}
/// <summary>
/// Reads the bias and weight values from the coefficients array and writes the values to the neurons.
/// </summary>
/// <param name="coefficients">The bias and weight values array.</param>
/// <param name="index">The start index of the array.</param>
/// <returns>The updated start index.</returns>
/// <remarks>
/// Base implementation is a no-op that returns the index unchanged; layers with
/// coefficients override it. (The original doc referred to an "activations" array,
/// which did not match the parameter name.)
/// </remarks>
public virtual int SetCoefficients(Single1D coefficients, int index)
{
    return(index);
}
/// <summary>
/// Reads the activation values from the activations array and writes the values to the neurons.
/// </summary>
/// <param name="activations">The activation values array.</param>
/// <param name="index">The start index of the array.</param>
/// <returns>The updated start index.</returns>
/// <remarks>Base implementation is a no-op; layers with neurons override it.</remarks>
public virtual int SetActivations(Single1D activations, int index) => index;
/// <summary>
/// Calculates the delta values from the required output values and the cost function.
/// </summary>
/// <param name="ys">The required output values.</param>
/// <param name="costFunction">The cost function whose derivative defines the deltas.</param>
/// <remarks>Base implementation is intentionally empty; the output layer overrides it.</remarks>
public virtual void CalculateDeltas(Single1D ys, CostFunctionEnum costFunction) // This must be the last layer in the network
{
}
/// <summary>
/// Lets every child layer, in order, add its cost derivatives into the array.
/// </summary>
/// <param name="derivatives">The array accumulating the derivative values.</param>
/// <param name="index">The start index of the array.</param>
/// <param name="lambdaDivN">The regularization coefficient divided by the weight count.</param>
/// <returns>The updated start index after the last layer.</returns>
public override int AddDerivatives(Single1D derivatives, int index, float lambdaDivN)
{
    // Thread the running index through each layer via a local captured by the lambda.
    int next = index;
    ForEach(layer => next = layer.AddDerivatives(derivatives, next, lambdaDivN));
    return next;
}
/// <summary>
/// Creates a report payload carrying the cost, its derivatives and the measured outputs.
/// </summary>
/// <param name="cost">The cost value of the current iteration.</param>
/// <param name="derivatives">The derivatives of the cost with respect to the coefficients.</param>
/// <param name="measurements">The measured output activations per sample.</param>
public CostAndDerivativesReportData(float cost, Single1D derivatives, MeasurementList measurements)
    => (Cost, Derivatives, Measurements) = (cost, derivatives, measurements);
/// <summary>
/// Reports the cost, its derivatives and the measured outputs of one iteration.
/// </summary>
/// <param name="cost">The cost value of the current iteration.</param>
/// <param name="derivatives">The derivatives of the cost with respect to the coefficients.</param>
/// <param name="measurements">The measured output activations per sample.</param>
public void ReportCostAndDerivatives(float cost, Single1D derivatives, MeasurementList measurements)
    => Report(new CostAndDerivativesReportData(cost, derivatives, measurements));
/// <summary>
/// Reports the current network coefficients.
/// </summary>
/// <param name="coefficients">The bias and weight values to report.</param>
public void ReportCoefficients(Single1D coefficients)
    => Report(new CoefficientsReportData(coefficients));
// ----------------------------------------------------------------------------------------
#region Constructors

/// <summary>
/// Creates a sample with empty input and requirement arrays of the given sizes.
/// </summary>
/// <param name="nInput">The number of input values.</param>
/// <param name="nRequirements">The number of required output values.</param>
public Sample(int nInput, int nRequirements)
{
    Inputs = new Single1D(nInput);
    Requirements = new Single1D(nRequirements);
}
/// <summary>
/// Reads the bias and weight values of every child layer, in order, into the array.
/// </summary>
/// <param name="coefficients">The array receiving the bias and weight values.</param>
/// <param name="index">The start index of the array.</param>
/// <returns>The updated start index after the last layer.</returns>
public override int GetCoefficients(Single1D coefficients, int index)
{
    // Thread the running index through each layer via a local captured by the lambda.
    int next = index;
    ForEach(layer => next = layer.GetCoefficients(coefficients, next));
    return next;
}