/// <summary>
/// Verifies CpuMathUtils.Sum against a scalar accumulation over the same data,
/// running inside a remote process so the hardware-intrinsics flag named by
/// <paramref name="mode"/> takes effect before the CpuMath path is chosen.
/// </summary>
public void SumTest(string mode, string test, Dictionary<string, string> environmentVariables)
{
    RemoteExecutor.RemoteInvoke((arg0, arg1) =>
    {
        CheckProperFlag(arg0);
        float[] source = (float[])_testArrays[int.Parse(arg1)].Clone();

        float scalarSum = 0;
        for (int i = 0; i < source.Length; i++)
        {
            scalarSum += source[i];
        }

        var vectorizedSum = CpuMathUtils.Sum(source);
        Assert.Equal(scalarSum, vectorizedSum, 2);
        return RemoteExecutor.SuccessExitCode;
    }, mode, test, new RemoteInvokeOptions(environmentVariables));
}
/// <summary>
/// Verifies CpuMathUtils.AddScaleCopy. With src == dst (both clones of the same
/// test array), the expected result simplifies to dst[i] * (1 + defaultScale).
/// </summary>
public void AddScaleCopyUTest(int test, float defaultScale)
{
    float[] src = (float[])_testArrays[test].Clone();
    float[] dst = (float[])src.Clone();
    float[] result = (float[])dst.Clone();

    float[] expected = (float[])dst.Clone();
    for (int i = 0; i < expected.Length; i++)
    {
        expected[i] *= (1 + defaultScale);
    }

    CpuMathUtils.AddScaleCopy(defaultScale, src, dst, result, dst.Length);
    Assert.Equal(expected, result, _comparer);
}
/// <summary>
/// Compute Standard Deviation given a precomputed mean.
/// We have two overloads of StdDev instead of one with <see cref="Nullable{Float}"/> mean for perf reasons.
/// </summary>
private static Float StdDev(Float[] values, int count, int length, Float mean)
{
    Contracts.Assert(0 <= count && count <= length);
    if (count == 0)
    {
        return 0;
    }

    Float sumSq = 0;
    if (count != length && mean != 0)
    {
        // Sparse representation: each of the (length - count) implicit zeros
        // deviates from the mean by exactly mean, contributing mean^2.
        Float meanSq = mean * mean;
        sumSq = (length - count) * meanSq;
    }
    sumSq += CpuMathUtils.SumSq(mean, values, 0, count);
    return MathUtils.Sqrt(sumSq / length);
}
/// <summary>
/// Verifies CpuMathUtils.SdcaL1UpdateDense: each scaled value src[i] * (1 + DEFAULT_SCALE)
/// is soft-thresholded (shrunk toward zero by DEFAULT_SCALE, or zeroed if within it).
/// </summary>
public void SdcaL1UpdateUTest(int test)
{
    float[] src = (float[])testArrays[test].Clone();
    float[] v = (float[])src.Clone();
    float[] w = (float[])src.Clone();

    float[] expected = (float[])w.Clone();
    for (int i = 0; i < expected.Length; i++)
    {
        float value = src[i] * (1 + DEFAULT_SCALE);
        if (Math.Abs(value) > DEFAULT_SCALE)
        {
            expected[i] = value > 0 ? value - DEFAULT_SCALE : value + DEFAULT_SCALE;
        }
        else
        {
            expected[i] = 0;
        }
    }

    CpuMathUtils.SdcaL1UpdateDense(DEFAULT_SCALE, src.Length, src, DEFAULT_SCALE, v, w);
    Assert.Equal(expected, w, comparer);
}
/// <summary>
/// Verifies CpuMathUtils.SdcaL1UpdateDense: each scaled value src[i] * (1 + DefaultScale)
/// is soft-thresholded (shrunk toward zero by DefaultScale, or zeroed if within it).
/// </summary>
public void SdcaL1UpdateUTest(int test)
{
    float[] src = (float[])_testArrays[test].Clone();
    float[] v = (float[])src.Clone();
    float[] w = (float[])src.Clone();

    float[] expected = (float[])w.Clone();
    for (int i = 0; i < expected.Length; i++)
    {
        float value = src[i] * (1 + DefaultScale);
        if (Math.Abs(value) > DefaultScale)
        {
            expected[i] = value > 0 ? value - DefaultScale : value + DefaultScale;
        }
        else
        {
            expected[i] = 0;
        }
    }

    CpuMathUtils.SdcaL1UpdateDense(DefaultScale, src.Length, src, DefaultScale, v, w);
    Assert.Equal(expected, w, _comparer);
}
/// <summary>
/// Verifies CpuMathUtils.MaxAbsDiff by scanning scalar-wise for the maximum
/// absolute difference from <paramref name="defaultScale"/>.
/// </summary>
public void MaxAbsDiffUTest(int test, float defaultScale)
{
    float[] src = (float[])_testArrays[test].Clone();
    var actual = CpuMathUtils.MaxAbsDiff(defaultScale, src);

    float expected = 0;
    for (int i = 0; i < src.Length; i++)
    {
        float abs = Math.Abs(src[i] - defaultScale);
        if (abs > expected)
        {
            expected = abs;
        }
    }

    Assert.Equal(expected, actual, 2);
}
/// <summary>
/// Builds a row getter that promotes a scalar Float input column to a one-element
/// vector and applies the Fourier feature transform to it.
/// </summary>
/// <remarks>
/// NOTE(review): the aligned scratch buffers and the one-element VBuffer are captured
/// by the returned closure and reused on every invocation, so the getter is stateful —
/// presumably callers create one getter per cursor; confirm it is never shared across threads.
/// </remarks>
private ValueGetter<VBuffer<Float>> GetterFromFloatType(IRow input, int iinfo)
{
    var getSrc = GetSrcGetter<Float>(input, iinfo);
    var src = default(Float);

    // Scratch buffers sized to the CpuMath alignment requirements.
    var featuresAligned = new AlignedArray(RoundUp(1, _cfltAlign), CpuMathUtils.GetVectorAlignment());
    var productAligned = new AlignedArray(RoundUp(_transformInfos[iinfo].NewDim, _cfltAlign), CpuMathUtils.GetVectorAlignment());

    var oneDimensionalVector = new VBuffer<Float>(1, new Float[] { 0 });

    return (ref VBuffer<Float> dst) =>
    {
        getSrc(ref src);
        oneDimensionalVector.Values[0] = src;
        TransformFeatures(Host, ref oneDimensionalVector, ref dst, _transformInfos[iinfo], featuresAligned, productAligned);
    };
}
/// <summary>
/// Verifies CpuMathUtils.SumAbs(mean, src) against a scalar sum of |src[i] - defaultScale|,
/// running in a remote process so the requested intrinsics flag takes effect.
/// </summary>
public void SumAbsDiffUTest(string mode, string test, string scale, Dictionary<string, string> environmentVariables)
{
    RemoteExecutor.RemoteInvoke((arg0, arg1, arg2) =>
    {
        CheckProperFlag(arg0);
        float defaultScale = float.Parse(arg2, CultureInfo.InvariantCulture);
        float[] source = (float[])_testArrays[int.Parse(arg1)].Clone();

        var actual = CpuMathUtils.SumAbs(defaultScale, source);

        float expected = 0;
        for (int i = 0; i < source.Length; i++)
        {
            expected += Math.Abs(source[i] - defaultScale);
        }

        Assert.Equal(expected, actual, 2);
        return RemoteExecutor.SuccessExitCode;
    }, mode, test, scale, new RemoteInvokeOptions(environmentVariables));
}
/// <summary>
/// Verifies CpuMathUtils.ScaleAdd: dst[i] becomes defaultScale * (dst[i] + defaultScale).
/// Runs in a remote process so the requested intrinsics flag takes effect.
/// </summary>
public void ScaleAddUTest(string mode, string test, string scale, Dictionary<string, string> environmentVariables)
{
    RemoteExecutor.RemoteInvoke((arg0, arg1, arg2) =>
    {
        CheckProperFlag(arg0);
        float defaultScale = float.Parse(arg2, CultureInfo.InvariantCulture);
        float[] dst = (float[])_testArrays[int.Parse(arg1)].Clone();

        float[] expected = (float[])dst.Clone();
        for (int i = 0; i < expected.Length; i++)
        {
            expected[i] = defaultScale * (dst[i] + defaultScale);
        }

        CpuMathUtils.ScaleAdd(defaultScale, defaultScale, dst);
        Assert.Equal(expected, dst, _comparer);
        return RemoteExecutor.SuccessExitCode;
    }, mode, test, scale, new RemoteInvokeOptions(environmentVariables));
}
/// <summary>
/// Compute Standard Deviation. In case of both subMean and useStd are true, we technically need to compute variance
/// based on centered values (i.e. after subtracting the mean). But since the centered
/// values mean is approximately zero, we can use variance of non-centered values.
/// </summary>
private static Float StdDev(Float[] values, int count, int length)
{
    Contracts.Assert(0 <= count && count <= length);
    if (count == 0)
    {
        return 0;
    }

    // We need a mean to compute variance. Dividing by length (not count) treats the
    // unstored entries of a sparse representation as zeros.
    Float tmpMean = CpuMathUtils.Sum(values, 0, count) / length;
    Float sumSq = 0;
    if (count != length && tmpMean != 0)
    {
        // Sparse representation: each implicit zero deviates from the mean by tmpMean.
        Float meanSq = tmpMean * tmpMean;
        sumSq = (length - count) * meanSq;
    }
    sumSq += CpuMathUtils.SumSq(tmpMean, values, 0, count);
    return MathUtils.Sqrt(sumSq / length);
}
/// <summary>
/// Verifies CpuMathUtils.SdcaL1UpdateSparse: for each indexed position,
/// v[index] + src[i] * DEFAULT_SCALE is soft-thresholded by DEFAULT_SCALE.
/// </summary>
public void SdcaL1UpdateSUTest(int test)
{
    float[] src = (float[])testArrays[test].Clone();
    float[] v = (float[])src.Clone();
    float[] w = (float[])src.Clone();
    int[] idx = testIndexArray;

    float[] expected = (float[])w.Clone();
    for (int i = 0; i < idx.Length; i++)
    {
        int index = idx[i];
        float value = v[index] + src[i] * DEFAULT_SCALE;
        if (Math.Abs(value) > DEFAULT_SCALE)
        {
            expected[index] = value > 0 ? value - DEFAULT_SCALE : value + DEFAULT_SCALE;
        }
        else
        {
            expected[index] = 0;
        }
    }

    CpuMathUtils.SdcaL1UpdateSparse(DEFAULT_SCALE, src.Length, src, idx, idx.Length, DEFAULT_SCALE, v, w);
    Assert.Equal(expected, w, comparer);
}
/// <summary>
/// Verifies CpuMathUtils.SdcaL1UpdateSparse: for each indexed position,
/// v[index] + src[i] * DefaultScale is soft-thresholded by DefaultScale.
/// </summary>
public void SdcaL1UpdateSUTest(int test)
{
    float[] src = (float[])_testArrays[test].Clone();
    float[] v = (float[])src.Clone();
    float[] w = (float[])src.Clone();
    int[] idx = _testIndexArray;

    float[] expected = (float[])w.Clone();
    for (int i = 0; i < idx.Length; i++)
    {
        int index = idx[i];
        float value = v[index] + src[i] * DefaultScale;
        if (Math.Abs(value) > DefaultScale)
        {
            expected[index] = value > 0 ? value - DefaultScale : value + DefaultScale;
        }
        else
        {
            expected[index] = 0;
        }
    }

    CpuMathUtils.SdcaL1UpdateSparse(DefaultScale, idx.Length, src, idx, DefaultScale, v, w);
    Assert.Equal(expected, w, _comparer);
}
/// <summary>
/// Squared L2 distance between a dense vector A (its first <paramref name="lengthA"/>
/// entries) and a sparse vector B, computed as ||A||^2 + ||B||^2 - 2*(A.B) and clamped
/// at zero to absorb small negative results from floating-point rounding.
/// </summary>
private static Float L2DistSquaredHalfSparse(Float[] valuesA, int lengthA, Float[] valuesB, int[] indicesB, int countB)
{
    Contracts.AssertValueOrNull(valuesA);
    Contracts.AssertValueOrNull(valuesB);
    Contracts.AssertValueOrNull(indicesB);
    Contracts.Assert(0 <= lengthA && lengthA <= Utils.Size(valuesA));
    Contracts.Assert(0 <= countB && countB <= Utils.Size(indicesB));
    Contracts.Assert(countB <= Utils.Size(valuesB));

    var normA = CpuMathUtils.SumSq(valuesA.AsSpan(0, lengthA));
    if (countB == 0)
    {
        // B is entirely zero, so the distance is just ||A||^2.
        return normA;
    }

    var normB = CpuMathUtils.SumSq(valuesB.AsSpan(0, countB));
    var dotP = CpuMathUtils.DotProductSparse(valuesA, valuesB, indicesB, countB);
    var res = normA + normB - 2 * dotP;
    return res < 0 ? 0 : res;
}
/// <summary>
/// Verifies CpuMathUtils.AddScaleCopy in a remote process. With src == dst, the
/// expected result simplifies to dst[i] * (1 + defaultScale).
/// </summary>
public void AddScaleCopyUTest(string mode, string test, string scale, Dictionary<string, string> environmentVariables)
{
    RemoteExecutor.RemoteInvoke((arg0, arg1, arg2) =>
    {
        CheckProperFlag(arg0);
        // BUGFIX: parse with the invariant culture, as every other remote test in this
        // file does (e.g. SumAbsDiffUTest, ScaleAddUTest). Without it, the float's
        // round-trip through the remote process's command line fails on cultures that
        // use ',' as the decimal separator.
        float defaultScale = float.Parse(arg2, CultureInfo.InvariantCulture);
        float[] src = (float[])_testArrays[int.Parse(arg1)].Clone();
        float[] dst = (float[])src.Clone();
        float[] result = (float[])dst.Clone();

        float[] expected = (float[])dst.Clone();
        for (int i = 0; i < expected.Length; i++)
        {
            expected[i] *= (1 + defaultScale);
        }

        CpuMathUtils.AddScaleCopy(defaultScale, src, dst, result, dst.Length);
        Assert.Equal(expected, result, _comparer);
        return RemoteExecutor.SuccessExitCode;
    }, mode, test, scale, new RemoteInvokeOptions(environmentVariables));
}
/// <summary>
/// Verifies sparse CpuMathUtils.Add (scatter-add through _testIndexArray) against
/// hand-computed expected values for the fixed test data.
/// </summary>
public void AddSUTest(int test)
{
    float[] src = (float[])_testArrays[test].Clone();
    float[] dst = (float[])src.Clone();
    int[] idx = _testIndexArray;

    // Precomputed results at the positions touched by the index array.
    float[] expected = (float[])dst.Clone();
    expected[0] = 3.92f;
    expected[2] = -12.14f;
    expected[5] = -36.69f;
    expected[6] = 46.29f;
    expected[8] = -104.41f;
    expected[11] = -13.09f;
    expected[12] = -73.92f;
    expected[13] = -23.64f;
    expected[14] = 34.41f;

    CpuMathUtils.Add(src, idx, dst, idx.Length);
    Assert.Equal(expected, dst, _comparer);
}
/// <summary>
/// Verifies sparse CpuMathUtils.AddScale (scaled scatter-add through _testIndexArray)
/// against hand-computed expected values for the fixed test data.
/// </summary>
public void AddScaleSUTest(int test)
{
    float[] src = (float[])_testArrays[test].Clone();
    float[] dst = (float[])src.Clone();
    int[] idx = _testIndexArray;

    // Precomputed results at the positions touched by the index array.
    float[] expected = (float[])dst.Clone();
    expected[0] = 5.292f;
    expected[2] = -13.806f;
    expected[5] = -43.522f;
    expected[6] = 55.978f;
    expected[8] = -178.869f;
    expected[11] = -31.941f;
    expected[12] = -51.205f;
    expected[13] = -21.337f;
    expected[14] = 35.782f;

    CpuMathUtils.AddScale(DefaultScale, src, idx, dst, idx.Length);
    Assert.Equal(expected, dst, _comparer);
}
/// <summary>
/// Verifies sparse CpuMathUtils.AddScale in a remote process: for each entry of the
/// index array, dst[idx[i]] should gain defaultScale * src[i].
/// </summary>
public void AddScaleSUTest(string mode, string test, string scale, Dictionary<string, string> environmentVariables)
{
    RemoteExecutor.RemoteInvoke((arg0, arg1, arg2) =>
    {
        CheckProperFlag(arg0);
        float defaultScale = float.Parse(arg2, CultureInfo.InvariantCulture);
        float[] src = (float[])_testArrays[int.Parse(arg1)].Clone();
        float[] dst = (float[])src.Clone();
        int[] idx = _testIndexArray;

        // Compute the expectation scalar-wise from the untouched source
        // (AddScale mutates only dst).
        float[] expected = (float[])dst.Clone();
        for (int i = 0; i < idx.Length; i++)
        {
            expected[idx[i]] += defaultScale * src[i];
        }

        CpuMathUtils.AddScale(defaultScale, src, idx, dst, idx.Length);
        Assert.Equal(expected, dst, _comparer);
        return RemoteExecutor.SuccessExitCode;
    }, mode, test, scale, new RemoteInvokeOptions(environmentVariables));
}
/// <summary>
/// Adds a multiple of a <see cref="VBuffer{T}"/> to a <see cref="Float"/> array.
/// </summary>
/// <param name="src">Buffer to add</param>
/// <param name="dst">Array to add to</param>
/// <param name="c">Coefficient</param>
public static void AddMult(ref VBuffer<Float> src, Float[] dst, Float c)
{
    Contracts.CheckValue(dst, nameof(dst));
    Contracts.CheckParam(src.Length == dst.Length, nameof(dst), "Arrays must have the same dimensionality.");

    // Nothing to do for an empty buffer or a zero coefficient.
    if (src.Count == 0 || c == 0)
    {
        return;
    }

    if (src.IsDense)
    {
        CpuMathUtils.AddScale(c, src.Values, dst, src.Count);
    }
    else
    {
        // Sparse: scatter-add only the explicitly stored entries.
        for (int pos = 0; pos < src.Count; pos++)
        {
            dst[src.Indices[pos]] += c * src.Values[pos];
        }
    }
}
/// <summary>
/// Verifies dense CpuMathUtils.Add. dst starts as src + 1, so after the add
/// each element should be 2 * src[i] + 1.
/// </summary>
public void AddUTest(int test)
{
    float[] src = (float[])testArrays[test].Clone();
    float[] dst = (float[])src.Clone();
    float[] expected = (float[])src.Clone();

    // Ensures src and dst are different arrays
    for (int i = 0; i < dst.Length; i++)
    {
        dst[i] += 1;
    }

    for (int i = 0; i < expected.Length; i++)
    {
        expected[i] = 2 * expected[i] + 1;
    }

    CpuMathUtils.Add(src, dst, dst.Length);
    Assert.Equal(expected, dst, comparer);
}
/// <summary>
/// Builds a new transform description: validates the requested rank, seeds the
/// pseudo-random generator, creates the distribution sampler, and allocates the
/// alignment-padded Fourier coefficient buffers.
/// </summary>
public TransformInfo(IHost host, ApproximatedKernelMappingEstimator.ColumnOptions column, int d, float avgDist)
{
    Contracts.AssertValue(host);

    SrcDim = d;
    NewDim = column.Rank;
    host.CheckUserArg(NewDim > 0, nameof(column.Rank));
    _useSin = column.UseCosAndSinBases;

    // Use the user-provided seed when present; otherwise derive from the host's RNG.
    var seed = column.Seed;
    _rand = seed.HasValue ? RandomUtils.Create(seed) : RandomUtils.Create(host.Rand);
    _state = _rand.GetState();

    var generator = column.Generator;
    _matrixGenerator = generator.GetRandomNumberGenerator(avgDist);

    // Dimensions are rounded up to the CpuMath alignment multiple before allocating.
    int roundedUpD = RoundUp(NewDim, _cfltAlign);
    int roundedUpNumFeatures = RoundUp(SrcDim, _cfltAlign);
    RndFourierVectors = new AlignedArray(roundedUpD * roundedUpNumFeatures, CpuMathUtils.GetVectorAlignment());
    RotationTerms = _useSin ? null : new AlignedArray(roundedUpD, CpuMathUtils.GetVectorAlignment());

    InitializeFourierCoefficients(roundedUpNumFeatures, roundedUpD);
}
/// <summary>
/// Verifies sparse CpuMathUtils.Add in a remote process. Test case 2 uses a shorter
/// limit (9) over the index array; the others use 18.
/// </summary>
public void AddSUTest(string mode, string test, Dictionary<string, string> environmentVariables)
{
    RemoteExecutor.RemoteInvoke((arg0, arg1) =>
    {
        CheckProperFlag(arg0);
        int testIndex = int.Parse(arg1);
        float[] src = (float[])_testArrays[testIndex].Clone();
        float[] dst = (float[])src.Clone();
        int[] idx = _testIndexArray;
        int limit = testIndex == 2 ? 9 : 18;

        float[] expected = (float[])dst.Clone();
        for (int i = 0; i < limit; i++)
        {
            expected[idx[i]] += src[i];
        }

        CpuMathUtils.Add(src, idx, dst, limit);
        Assert.Equal(expected, dst, _comparer);
        return RemoteExecutor.SuccessExitCode;
    }, mode, test, new RemoteInvokeOptions(environmentVariables));
}
/// <summary>
/// Verifies CpuMathUtils.SdcaL1UpdateDense in a remote process: each scaled value
/// src[i] * (1 + defaultScale) is soft-thresholded by defaultScale.
/// </summary>
public void SdcaL1UpdateUTest(string mode, string test, string scale, Dictionary<string, string> environmentVariables)
{
    RemoteExecutor.RemoteInvoke((arg0, arg1, arg2) =>
    {
        CheckProperFlag(arg0);
        float defaultScale = float.Parse(arg2, CultureInfo.InvariantCulture);
        float[] src = (float[])_testArrays[int.Parse(arg1)].Clone();
        float[] v = (float[])src.Clone();
        float[] w = (float[])src.Clone();

        float[] expected = (float[])w.Clone();
        for (int i = 0; i < expected.Length; i++)
        {
            float value = src[i] * (1 + defaultScale);
            if (Math.Abs(value) > defaultScale)
            {
                expected[i] = value > 0 ? value - defaultScale : value + defaultScale;
            }
            else
            {
                expected[i] = 0;
            }
        }

        CpuMathUtils.SdcaL1UpdateDense(defaultScale, src.Length, src, defaultScale, v, w);
        Assert.Equal(expected, w, _comparer);
        return RemoteExecutor.SuccessExitCode;
    }, mode, test, scale, new RemoteInvokeOptions(environmentVariables));
}
/// <summary>
/// Dot product of two vectors of equal length, dispatching to the dense or sparse
/// CpuMathUtils kernel based on each buffer's representation.
/// </summary>
public static Float DotProduct(ref VBuffer<Float> a, ref VBuffer<Float> b)
{
    Contracts.Check(a.Length == b.Length, "Vectors must have the same dimensionality.");

    if (a.Count == 0 || b.Count == 0)
    {
        return 0;
    }

    if (a.IsDense)
    {
        return b.IsDense
            ? CpuMathUtils.DotProductDense(a.Values, b.Values, a.Length)
            : CpuMathUtils.DotProductSparse(a.Values, b.Values, b.Indices, b.Count);
    }

    if (b.IsDense)
    {
        // Dense side goes first; the sparse side supplies the indices.
        return CpuMathUtils.DotProductSparse(b.Values, a.Values, a.Indices, a.Count);
    }

    // Both sparse: merge-walk the two index lists.
    return DotProductSparse(a.Values, a.Indices, 0, a.Count, b.Values, b.Indices, 0, b.Count, 0);
}
/// <summary>
/// Deserializes a transform description from the model context and rebuilds the
/// Fourier coefficient buffers from the persisted RNG state.
/// </summary>
public TransformInfo(IHostEnvironment env, ModelLoadContext ctx, int colValueCount, string directoryName)
{
    // BUGFIX: the original asserted env against itself (env.AssertValue(env)), which
    // can never fire meaningfully; validate the context that is actually read below.
    Contracts.AssertValue(env);
    env.AssertValue(ctx);
    env.Assert(colValueCount > 0);

    // *** Binary format ***
    // int: d (number of untransformed features)
    // int: NewDim (number of transformed features)
    // bool: UseSin
    // uint[4]: the seeds for the pseudo random number generator.

    SrcDim = ctx.Reader.ReadInt32();
    env.CheckDecode(SrcDim == colValueCount);

    NewDim = ctx.Reader.ReadInt32();
    env.CheckDecode(NewDim > 0);

    _useSin = ctx.Reader.ReadBoolByte();

    var length = ctx.Reader.ReadInt32();
    env.CheckDecode(length == 4);
    _state = TauswortheHybrid.State.Load(ctx.Reader);
    _rand = new TauswortheHybrid(_state);

    env.CheckDecode(ctx.Repository != null &&
        ctx.LoadModelOrNull<IFourierDistributionSampler, SignatureLoadModel>(env, out _matrixGenerator, directoryName));

    // initialize the transform matrix
    int roundedUpD = RoundUp(NewDim, _cfltAlign);
    int roundedUpNumFeatures = RoundUp(SrcDim, _cfltAlign);
    RndFourierVectors = new AlignedArray(roundedUpD * roundedUpNumFeatures, CpuMathUtils.GetVectorAlignment());
    RotationTerms = _useSin ? null : new AlignedArray(roundedUpD, CpuMathUtils.GetVectorAlignment());
    InitializeFourierCoefficients(roundedUpNumFeatures, roundedUpD);
}
/// <summary>
/// Verifies CpuMathUtils.MulElementWise: with src2[i] = src1[i] + 1, the
/// expected product is src1[i] * (src1[i] + 1).
/// </summary>
public void MulElementWiseUTest(int test)
{
    float[] src1 = (float[])testArrays[test].Clone();
    float[] src2 = (float[])src1.Clone();
    float[] dst = (float[])src1.Clone();

    // Ensures src1 and src2 are different arrays
    for (int i = 0; i < src2.Length; i++)
    {
        src2[i] += 1;
    }

    float[] expected = (float[])src1.Clone();
    for (int i = 0; i < expected.Length; i++)
    {
        expected[i] *= (1 + expected[i]);
    }

    CpuMathUtils.MulElementWise(src1, src2, dst, dst.Length);
    Assert.Equal(expected, dst, comparer);
}
/// <summary>
/// Sparse dot product between a slice of <paramref name="a"/> beginning at
/// <paramref name="aOffset"/> and the values of <paramref name="b"/> addressed
/// through <paramref name="indices"/>.
/// </summary>
private static float DotProduct(float[] a, int aOffset, ReadOnlySpan<float> b, ReadOnlySpan<int> indices, int count)
{
    Contracts.Assert(count <= indices.Length);
    return CpuMathUtils.DotProductSparse(a.AsSpan(aOffset), b, indices, count);
}
/// <inheritdoc/>
// NOTE(review): Lock-free multi-class SDCA dual update shared across training threads.
// Each dual variable is updated via compare-and-swap (duals.ApplyAt + Interlocked.CompareExchange),
// retrying up to 2 * numThreads times; the primal/bias bookkeeping inside the `success`
// branch and the label-class wrap-up at the end are order-sensitive — do not reorder.
// When L1Threshold > 0, weights/bias are maintained by soft-thresholding the
// l1Intermediate copies; when it is 0 they are updated directly.
private protected override void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, Random rand,
    IdToIdxLookup idToIdx, int numThreads, DualsTableBase duals, float[] biasReg, float[] invariants, float lambdaNInv,
    VBuffer<float>[] weights, float[] biasUnreg, VBuffer<float>[] l1IntermediateWeights, float[] l1IntermediateBias, float[] featureNormSquared)
{
    Contracts.AssertValueOrNull(progress);
    Contracts.Assert(SdcaTrainerOptions.L1Threshold.HasValue);
    Contracts.AssertValueOrNull(idToIdx);
    Contracts.AssertValueOrNull(invariants);
    Contracts.AssertValueOrNull(featureNormSquared);
    int numClasses = Utils.Size(weights);
    Contracts.Assert(Utils.Size(biasReg) == numClasses);
    Contracts.Assert(Utils.Size(biasUnreg) == numClasses);

    int maxUpdateTrials = 2 * numThreads;
    var l1Threshold = SdcaTrainerOptions.L1Threshold.Value;
    bool l1ThresholdZero = l1Threshold == 0;
    var lr = SdcaTrainerOptions.BiasLearningRate * SdcaTrainerOptions.L2Regularization.Value;

    var pch = progress != null ? progress.StartProgressChannel("Dual update") : null;
    using (pch)
    using (var cursor = SdcaTrainerOptions.Shuffle ? cursorFactory.Create(rand) : cursorFactory.Create())
    {
        long rowCount = 0;
        if (pch != null)
        {
            pch.SetHeader(new ProgressHeader("examples"), e => e.SetProgress(0, rowCount));
        }

        Func<DataViewRowId, long> getIndexFromId = GetIndexFromIdGetter(idToIdx, biasReg.Length);
        while (cursor.MoveNext())
        {
            long idx = getIndexFromId(cursor.Id);
            long dualIndexInitPos = idx * numClasses;
            var features = cursor.Features;
            var label = (int)cursor.Label;
            float invariant;
            float normSquared;
            if (invariants != null)
            {
                invariant = invariants[idx];
                Contracts.AssertValue(featureNormSquared);
                normSquared = featureNormSquared[idx];
            }
            else
            {
                normSquared = VectorUtils.NormSquared(in features);
                if (SdcaTrainerOptions.BiasLearningRate == 0)
                {
                    normSquared += 1;
                }
                invariant = _loss.ComputeDualUpdateInvariant(2 * normSquared * lambdaNInv * GetInstanceWeight(cursor));
            }

            // The output for the label class using current weights and bias.
            var labelOutput = WDot(in features, in weights[label], biasReg[label] + biasUnreg[label]);
            var instanceWeight = GetInstanceWeight(cursor);

            // This will be the new dual variable corresponding to the label class.
            float labelDual = 0;

            // This will be used to update the weights and regularized bias corresponding to the label class.
            float labelPrimalUpdate = 0;

            // This will be used to update the unregularized bias corresponding to the label class.
            float labelAdjustment = 0;

            // Iterates through all classes.
            for (int iClass = 0; iClass < numClasses; iClass++)
            {
                // Skip the dual/weights/bias update for label class. Will be taken care of at the end.
                if (iClass == label)
                {
                    continue;
                }

                var weightsEditor = VBufferEditor.CreateFromBuffer(ref weights[iClass]);
                var l1IntermediateWeightsEditor = !l1ThresholdZero ? VBufferEditor.CreateFromBuffer(ref l1IntermediateWeights[iClass]) : default;

                // Loop trials for compare-and-swap updates of duals.
                // In general, concurrent update conflict to the same dual variable is rare
                // if data is shuffled.
                for (int numTrials = 0; numTrials < maxUpdateTrials; numTrials++)
                {
                    long dualIndex = iClass + dualIndexInitPos;
                    var dual = duals[dualIndex];
                    var output = labelOutput + labelPrimalUpdate * normSquared - WDot(in features, in weights[iClass], biasReg[iClass] + biasUnreg[iClass]);
                    var dualUpdate = _loss.DualUpdate(output, 1, dual, invariant, numThreads);

                    // The successive over-relaxation approach to adjust the sum of dual variables (biasReg) to zero.
                    // Reference to details: http://stat.rutgers.edu/home/tzhang/papers/ml02_dual.pdf, pp. 16-17.
                    var adjustment = l1ThresholdZero ? lr * biasReg[iClass] : lr * l1IntermediateBias[iClass];
                    dualUpdate -= adjustment;
                    bool success = false;
                    duals.ApplyAt(dualIndex, (long index, ref float value) =>
                        success = Interlocked.CompareExchange(ref value, dual + dualUpdate, dual) == dual);

                    if (success)
                    {
                        // Note: dualConstraint[iClass] = lambdaNInv * (sum of duals[iClass])
                        var primalUpdate = dualUpdate * lambdaNInv * instanceWeight;
                        labelDual -= dual + dualUpdate;
                        labelPrimalUpdate += primalUpdate;
                        biasUnreg[iClass] += adjustment * lambdaNInv * instanceWeight;
                        labelAdjustment -= adjustment;

                        if (l1ThresholdZero)
                        {
                            VectorUtils.AddMult(in features, weightsEditor.Values, -primalUpdate);
                            biasReg[iClass] -= primalUpdate;
                        }
                        else
                        {
                            //Iterative shrinkage-thresholding (aka. soft-thresholding)
                            //Update v=denseWeights as if there's no L1
                            //Thresholding: if |v[j]| < threshold, turn off weights[j]
                            //If not, shrink: w[j] = v[i] - sign(v[j]) * threshold
                            l1IntermediateBias[iClass] -= primalUpdate;
                            if (SdcaTrainerOptions.BiasLearningRate == 0)
                            {
                                biasReg[iClass] = Math.Abs(l1IntermediateBias[iClass]) - l1Threshold > 0.0 ?
                                    l1IntermediateBias[iClass] - Math.Sign(l1IntermediateBias[iClass]) * l1Threshold : 0;
                            }

                            var featureValues = features.GetValues();
                            if (features.IsDense)
                            {
                                CpuMathUtils.SdcaL1UpdateDense(-primalUpdate, featureValues.Length, featureValues, l1Threshold, l1IntermediateWeightsEditor.Values, weightsEditor.Values);
                            }
                            else if (featureValues.Length > 0)
                            {
                                CpuMathUtils.SdcaL1UpdateSparse(-primalUpdate, featureValues.Length, featureValues, features.GetIndices(), l1Threshold, l1IntermediateWeightsEditor.Values, weightsEditor.Values);
                            }
                        }

                        break;
                    }
                }
            }

            // Updating with label class weights and dual variable.
            duals[label + dualIndexInitPos] = labelDual;
            biasUnreg[label] += labelAdjustment * lambdaNInv * instanceWeight;
            if (l1ThresholdZero)
            {
                var weightsEditor = VBufferEditor.CreateFromBuffer(ref weights[label]);
                VectorUtils.AddMult(in features, weightsEditor.Values, labelPrimalUpdate);
                biasReg[label] += labelPrimalUpdate;
            }
            else
            {
                l1IntermediateBias[label] += labelPrimalUpdate;
                var intermediateBias = l1IntermediateBias[label];
                biasReg[label] = Math.Abs(intermediateBias) - l1Threshold > 0.0 ?
                    intermediateBias - Math.Sign(intermediateBias) * l1Threshold : 0;

                var weightsEditor = VBufferEditor.CreateFromBuffer(ref weights[label]);
                var l1IntermediateWeightsEditor = VBufferEditor.CreateFromBuffer(ref l1IntermediateWeights[label]);
                var featureValues = features.GetValues();
                if (features.IsDense)
                {
                    CpuMathUtils.SdcaL1UpdateDense(labelPrimalUpdate, featureValues.Length, featureValues, l1Threshold, l1IntermediateWeightsEditor.Values, weightsEditor.Values);
                }
                else if (featureValues.Length > 0)
                {
                    CpuMathUtils.SdcaL1UpdateSparse(labelPrimalUpdate, featureValues.Length, featureValues, features.GetIndices(), l1Threshold, l1IntermediateWeightsEditor.Values, weightsEditor.Values);
                }
            }

            rowCount++;
        }
    }
}
/// <summary>
/// Returns the L2 norm of the vector (sum of squares of the components).
/// </summary>
public static Float Norm(Float[] a) => MathUtils.Sqrt(CpuMathUtils.SumSq(a));
/// <summary>
/// Dense dot product of two non-empty arrays of identical length.
/// </summary>
public static Float DotProduct(Float[] a, Float[] b)
{
    Contracts.Check(Utils.Size(a) == Utils.Size(b), "Arrays must have the same length");
    Contracts.Check(Utils.Size(a) > 0);
    return CpuMathUtils.DotProductDense(a, b, a.Length);
}
/// <summary>
/// Writes a normalized copy of <paramref name="src"/> into <paramref name="dst"/>:
/// the effective scale is <paramref name="scale"/> divided by <paramref name="divisor"/>
/// (when positive) and reset to 1 when it falls below MinScale; a nonzero
/// <paramref name="offset"/> is subtracted first, which forces a dense result.
/// </summary>
private static void FillValues(IExceptionContext ectx, ref VBuffer<Float> src, ref VBuffer<Float> dst, Float divisor, Float scale, Float offset = 0)
{
    int count = src.Count;
    int length = src.Length;
    ectx.Assert(Utils.Size(src.Values) >= count);
    ectx.Assert(divisor >= 0);

    if (count == 0)
    {
        // Empty source: emit an empty buffer of the same logical length, reusing dst's storage.
        dst = new VBuffer<Float>(length, 0, dst.Values, dst.Indices);
        return;
    }

    ectx.Assert(count > 0);
    ectx.Assert(length > 0);

    Float normScale = scale;
    if (divisor > 0)
    {
        normScale /= divisor;
    }

    // Don't normalize small values.
    if (normScale < MinScale)
    {
        normScale = 1;
    }

    if (offset == 0)
    {
        // Pure scaling preserves sparsity; grow dst's buffers only when too small.
        var dstValues = dst.Values;
        if (Utils.Size(dstValues) < count)
        {
            dstValues = new Float[count];
        }

        var dstIndices = dst.Indices;
        if (!src.IsDense)
        {
            if (Utils.Size(dstIndices) < count)
            {
                dstIndices = new int[count];
            }
            Array.Copy(src.Indices, dstIndices, count);
        }

        CpuMathUtils.Scale(normScale, src.Values, dstValues, count);
        dst = new VBuffer<Float>(length, count, dstValues, dstIndices);
        return;
    }

    // Subtracting the mean requires a dense representation.
    src.CopyToDense(ref dst);

    if (normScale != 1)
    {
        CpuMathUtils.ScaleAdd(normScale, -offset, dst.Values, length);
    }
    else
    {
        CpuMathUtils.Add(-offset, dst.Values, length);
    }
}