/// <summary>
/// Classifies the expanded multivariate coordinate.
/// </summary>
/// <param name="x">The expanded multivariate coordinate. For more information, see <see cref="Poly.Expand"/>.</param>
/// <returns>A vector of length <see cref="Ncats"/> giving the conditional probability of category membership for each category.
/// The entries sum to 1.0 (up to floating-point rounding).</returns>
public double[] ClassifyExpanded(float[] x)
{
    double p = 0.0;
    if (this.Npoly == 1)
    {
        double y = this.EvalPolyFromExpanded(0, x);
        Quantization q = this.Quant[0];
        if (y < q.Ymid[0])
        {
            p = q.P[0];
        }
        else if (y > q.Ymid[q.Nquantiles - 1])
        {
            p = q.P[q.Nquantiles - 1];
        }
        else
        {
            p = Static.Linterp(q.Ymid, q.P, y);
        }
        return(new double[] { p, 1.0 - p });
    }
    else
    {
        double[] output = new double[this.Ncats];
        double pSum = 0.0;
        for (int iCat = 0; iCat < this.Ncats; iCat++)
        {
            double y = this.EvalPolyFromExpanded(iCat, x);
            Quantization q = this.Quant[iCat];
            if (y < q.Ymid[0])
            {
                p = q.P[0];
            }
            else if (y > q.Ymid[q.Nquantiles - 1])
            {
                p = q.P[q.Nquantiles - 1];
            }
            else
            {
                p = Static.Linterp(q.Ymid, q.P, y);
            }
            output[iCat] = p;
            pSum += p;
        }
        // Normalize so the entries sum to 1.0.
        for (int iCat = 0; iCat < this.Ncats; iCat++)
        {
            output[iCat] /= pSum;
        }
        return(output);
    }
}
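// The clamp-then-interpolate lookup is duplicated in ClassifyExpanded (above) and
// ClassifyPolynomialOutputs (below). A minimal sketch of a shared helper is given here for
// clarity; the name LookupConditionalP is illustrative and is not referenced by the existing code.
private static double LookupConditionalP(Quantization q, double y)
{
    if (y < q.Ymid[0])
    {
        // Below the first bin midpoint: clamp to the first bin's probability.
        return q.P[0];
    }
    if (y > q.Ymid[q.Nquantiles - 1])
    {
        // Above the last bin midpoint: clamp to the last bin's probability.
        return q.P[q.Nquantiles - 1];
    }
    // Otherwise interpolate linearly between bin midpoints.
    return Static.Linterp(q.Ymid, q.P, y);
}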
/// <summary>
/// Classifies the polynomial outputs.
/// </summary>
/// <param name="y">The output of each polynomial.</param>
/// <returns>A vector of length <see cref="Ncats"/> giving the conditional probability of category membership for each category.
/// The entries sum to 1.0 (up to floating-point rounding).</returns>
public double[] ClassifyPolynomialOutputs(float[] y)
{
    double p = 0.0;
    if (this.Npoly == 1)
    {
        Quantization q = this.Quant[0];
        if (y[0] < q.Ymid[0])
        {
            p = q.P[0];
        }
        else if (y[0] > q.Ymid[q.Nquantiles - 1])
        {
            p = q.P[q.Nquantiles - 1];
        }
        else
        {
            p = Static.Linterp(q.Ymid, q.P, y[0]);
        }
        return(new double[] { p, 1.0 - p });
    }
    else
    {
        double[] output = new double[this.Ncats];
        double pSum = 0.0;
        for (int iCat = 0; iCat < this.Ncats; iCat++)
        {
            Quantization q = this.Quant[iCat];
            if (y[iCat] < q.Ymid[0])
            {
                p = q.P[0];
            }
            else if (y[iCat] > q.Ymid[q.Nquantiles - 1])
            {
                p = q.P[q.Nquantiles - 1];
            }
            else
            {
                p = Static.Linterp(q.Ymid, q.P, y[iCat]);
            }
            output[iCat] = p;
            pSum += p;
        }
        // Normalize so the entries sum to 1.0.
        for (int iCat = 0; iCat < this.Ncats; iCat++)
        {
            output[iCat] /= pSum;
        }
        return(output);
    }
}
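// Usage sketch (hypothetical, for illustration only): given the per-polynomial outputs for one
// datum, the most probable category can be taken as the arg-max of the returned posterior.
// The method name PredictFromPolynomialOutputs is illustrative and not part of the original API.
public int PredictFromPolynomialOutputs(float[] y)
{
    double[] posterior = this.ClassifyPolynomialOutputs(y);
    int best = 0;
    for (int iCat = 1; iCat < posterior.Length; iCat++)
    {
        if (posterior[iCat] > posterior[best])
        {
            best = iCat;
        }
    }
    return best;
}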
/// <summary>
/// Copies the quantization data.
/// </summary>
/// <returns>A deep copy of this <see cref="Quantization"/> instance.</returns>
public Quantization Copy()
{
    Quantization output = new Quantization(this.Nquantiles);
    output.Pmin = this.Pmin;
    output.Pmax = this.Pmax;
    for (int i = 0; i < this.Nquantiles; i++)
    {
        output.P[i] = this.P[i];
        output.Ymid[i] = this.Ymid[i];
        if (i < this.Nquantiles - 1)
        {
            output.Ysep[i] = this.Ysep[i];
        }
    }
    return(output);
}
/// <summary>
/// This performs the quantization procedure for the data provided.
/// </summary>
/// <param name="yVals">The y-value of each datum in the sample. These values are the output
/// of the polynomial function for the target category.</param>
/// <param name="yIdx">On output, provides the zero-based index into yVals that rank-orders the yVals; this order is
/// calculated by calling <see cref="Static.QuickSortIndex"/> to sort yVals[yIdx[:]].
/// On input, the order from the previous optimization step is passed in. Because that list is
/// mostly sorted (for small parameter changes), the sort runs faster than it would for an
/// arbitrary initial ordering such as 0...(yVals.Length-1).</param>
/// <param name="catVec">The category label of each datum.</param>
/// <param name="catWeight">The weight assigned to each category.</param>
/// <param name="targetCat">The target category for the y-values provided.</param>
/// <returns>The <see cref="Quantization"/> computed for the target category.</returns>
protected Quantization quantize(float[] yVals, int[] yIdx, byte[] catVec, double[] catWeight, byte targetCat)
{
    // Prepare output.
    Quantization output = new Quantization(this.Nquantiles);
    double wPerBin = this.totalWeight / (double)this.Nquantiles;
    output.Pmin = 0.5 / wPerBin;
    output.Pmax = 1.0 - output.Pmin;

    // IMPORTANT PERFORMANCE NOTE:
    // Each time we enter this function, the sort index is preserved from the prior function call.
    // This typically leads to faster sort times during optimization because the list is usually
    // already sorted (or partially sorted).
    Static.QuickSortIndex(yIdx, yVals, 0, yVals.Length);

    int iBin = 0;                // The current bin.
    double ySum = 0.0;           // Weighted sum of y-values for the current bin.
    double correctWeight = 0.0;  // Correct weight for the current bin.
    double errorWeight = 0.0;    // Incorrect weight for the current bin.
    double binWeight;
    double wNextBin = wPerBin;   // The amount of cumulative weight that separates the current bin from the next.
    double wThis = 0.0;          // The current accumulated weight.
    double dwThis;               // The amount of weight added by the current sample.
    double wLast;                // The accumulated weight prior to the current sample.

    // Quantize the samples.
    for (int iSamp = 0; iSamp < yVals.Length; iSamp++)
    {
        int iSort = yIdx[iSamp];
        byte idCat = catVec[iSort];
        wLast = wThis;
        dwThis = catWeight[idCat];
        wThis += dwThis;

        if (wThis > wNextBin || iSamp == yVals.Length - 1)
        {
            //---------------------------
            // It is time for a new bin.
            //---------------------------
            if (iBin < this.Nquantiles - 1 && wNextBin - wLast > wThis - wNextBin)
            {
                // Rewind to the last sample.
                iSort = yIdx[--iSamp];
                wThis = wLast;
            }
            else
            {
                // Process this sample as normal.
                ySum += dwThis * yVals[iSort];
                if (idCat == targetCat) { correctWeight += dwThis; } else { errorWeight += dwThis; }
            }

            //---------------------------
            // Special processing for the last bin.
            //---------------------------
            if (iBin == this.Nquantiles - 1)
            {
                // There should not be any more samples, but we do this just to be sure.
                while (++iSamp < yVals.Length)
                {
                    iSort = yIdx[iSamp];
                    idCat = catVec[iSort];
                    dwThis = catWeight[idCat];
                    wThis += dwThis;
                    ySum += dwThis * yVals[iSort];
                    if (idCat == targetCat) { correctWeight += dwThis; } else { errorWeight += dwThis; }
                }
            }

            //---------------------------
            // Close out the current bin.
            //---------------------------
            binWeight = Math.Max(0.00001, correctWeight + errorWeight);  // Floor the denominator to avoid division by zero for an empty bin.
            output.P[iBin] = correctWeight / binWeight;
            output.Ymid[iBin] = ySum / binWeight;
            if (iBin < this.Nquantiles - 1)
            {
                output.Ysep[iBin] = (yVals[iSort] + yVals[yIdx[iSamp + 1]]) / 2.0;
            }

            //---------------------------
            // Start the next bin.
            //---------------------------
            iBin++;
            ySum = 0.0;  // Reset the per-bin accumulators.
            correctWeight = errorWeight = 0.0;
            wNextBin += wPerBin;
        }
        else
        {
            // Process this sample.
            ySum += dwThis * yVals[iSort];
            if (idCat == targetCat) { correctWeight += dwThis; } else { errorWeight += dwThis; }
        }
    }
    return(output);
}
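// Worked sketch of the bin-weight arithmetic above (illustrative numbers, not taken from the source):
// with Nquantiles = 4 and totalWeight = 100, each bin targets wPerBin = 100 / 4 = 25 units of weight,
// giving Pmin = 0.5 / 25 = 0.02 and Pmax = 1 - 0.02 = 0.98. For each closed bin,
//   P[iBin]    = correctWeight / binWeight   (fraction of the bin's weight belonging to targetCat),
//   Ymid[iBin] = ySum / binWeight            (weight-averaged polynomial output of the bin),
//   Ysep[iBin] = midpoint between the last y-value in this bin and the first y-value of the next bin.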