private FieldAwareFactorizationMachinePredictor(IHostEnvironment env, ModelLoadContext ctx) : base(env, LoaderSignature)
{
    Host.AssertValue(ctx);

    // *** Binary format ***
    // bool: whether to normalize feature vectors
    // int: number of fields
    // int: number of features
    // int: latent dimension
    // float[]: linear coefficients
    // float[]: latent representation of features

    // Read and validate the scalar header values.
    bool normalize = ctx.Reader.ReadBoolean();
    int fields = ctx.Reader.ReadInt32();
    Host.CheckDecode(fields > 0);
    int features = ctx.Reader.ReadInt32();
    Host.CheckDecode(features > 0);
    int latent = ctx.Reader.ReadInt32();
    Host.CheckDecode(latent > 0);

    // The aligned buffer size must fit in an array; 'checked' surfaces overflow instead of silently wrapping.
    LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latent);
    Host.Check(checked(features * fields * LatentDimAligned) <= Utils.ArrayMaxSize, "Latent dimension too large");

    // Read the weight arrays and check that their lengths match the header.
    float[] linear = ctx.Reader.ReadFloatArray();
    Host.CheckDecode(Utils.Size(linear) == features);
    float[] latentFlat = ctx.Reader.ReadFloatArray();
    Host.CheckDecode(Utils.Size(latentFlat) == features * fields * latent);

    _norm = normalize;
    FieldCount = fields;
    FeatureCount = features;
    LatentDim = latent;
    _linearWeights = linear;

    // Repack the serialized (unaligned) latent vectors into an aligned buffer,
    // zero-padding each per-(feature, field) vector from LatentDim up to LatentDimAligned.
    _latentWeightsAligned = new AlignedArray(FeatureCount * FieldCount * LatentDimAligned, 16);
    for (int j = 0; j < FeatureCount; j++)
    {
        for (int f = 0; f < FieldCount; f++)
        {
            int src = (j * FieldCount + f) * LatentDim;
            int dst = (j * FieldCount + f) * LatentDimAligned;
            for (int k = 0; k < LatentDim; k++)
                _latentWeightsAligned[dst + k] = latentFlat[src + k];
            for (int k = LatentDim; k < LatentDimAligned; k++)
                _latentWeightsAligned[dst + k] = 0;
        }
    }
}
internal FieldAwareFactorizationMachinePredictor(IHostEnvironment env, bool norm, int fieldCount, int featureCount, int latentDim, float[] linearWeights, AlignedArray latentWeightsAligned) : base(env, LoaderSignature)
{
    // Internal-only path: the caller hands us an already-aligned latent buffer, so the
    // checks here are debug asserts rather than decode-time validation.
    Host.Assert(fieldCount > 0);
    Host.Assert(featureCount > 0);
    Host.Assert(latentDim > 0);
    Host.Assert(Utils.Size(linearWeights) == featureCount);

    // The aligned buffer must be sized for the padded latent dimension;
    // 'checked' surfaces overflow instead of silently wrapping.
    LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latentDim);
    Host.Assert(latentWeightsAligned.Size == checked(featureCount * fieldCount * LatentDimAligned));

    // Take ownership of the supplied buffers without copying.
    _norm = norm;
    _linearWeights = linearWeights;
    _latentWeightsAligned = latentWeightsAligned;
    FieldCount = fieldCount;
    FeatureCount = featureCount;
    LatentDim = latentDim;
}
public FieldAwareFactorizationMachineTrainer(IHostEnvironment env, Arguments args) : base(env, LoadName)
{
    // Reject invalid user-supplied hyperparameters up front with actionable messages.
    Host.CheckUserArg(args.LatentDim > 0, nameof(args.LatentDim), "Must be positive");
    Host.CheckUserArg(args.LambdaLinear >= 0, nameof(args.LambdaLinear), "Must be non-negative");
    Host.CheckUserArg(args.LambdaLatent >= 0, nameof(args.LambdaLatent), "Must be non-negative");
    Host.CheckUserArg(args.LearningRate > 0, nameof(args.LearningRate), "Must be positive");
    Host.CheckUserArg(args.Iters >= 0, nameof(args.Iters), "Must be non-negative");

    // Snapshot the validated hyperparameters into trainer fields.
    _latentDim = args.LatentDim;
    _latentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(_latentDim);
    _lambdaLinear = args.LambdaLinear;
    _lambdaLatent = args.LambdaLatent;
    _learningRate = args.LearningRate;
    _numIterations = args.Iters;
    _radius = args.Radius;

    // Behavior flags.
    _norm = args.Norm;
    _shuffle = args.Shuffle;
    _verbose = args.Verbose;
}
/// <summary>
/// Initialize model parameters with a trained model.
/// </summary>
/// <param name="env">The host environment</param>
/// <param name="norm">True if user wants to normalize feature vector to unit length.</param>
/// <param name="fieldCount">The number of fields, which is the symbol `m` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
/// <param name="featureCount">The number of features, which is the symbol `n` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
/// <param name="latentDim">The latent dimensions, which is the length of `v_{j, f}` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
/// <param name="linearWeights">The linear coefficients of the features, which is the symbol `w` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
/// <param name="latentWeights">Latent representation of each feature. Note that one feature may have <see cref="FieldCount"/> latent vectors
/// and each latent vector contains <see cref="LatentDim"/> values. In the f-th field, the j-th feature's latent vector, `v_{j, f}` in the doc
/// https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf, starts at latentWeights[j * fieldCount * latentDim + f * latentDim].
/// The k-th element in v_{j, f} is latentWeights[j * fieldCount * latentDim + f * latentDim + k].
/// The size of the array must be featureCount x fieldCount x latentDim.</param>
public FieldAwareFactorizationMachineModelParameters(IHostEnvironment env, bool norm, int fieldCount, int featureCount, int latentDim, float[] linearWeights, float[] latentWeights) : base(env, LoaderSignature)
{
    Host.Assert(fieldCount > 0);
    Host.Assert(featureCount > 0);
    Host.Assert(latentDim > 0);
    Host.Assert(Utils.Size(linearWeights) == featureCount);
    LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latentDim);
    // BUG FIX: the incoming latentWeights array is the UNALIGNED representation, documented
    // (and indexed below) as featureCount x fieldCount x latentDim. The previous assert compared
    // against LatentDimAligned, which wrongly rejected valid inputs whenever latentDim was not
    // already a multiple of the alignment. The deserialization path validates with latentDim too.
    Host.Assert(Utils.Size(latentWeights) == checked(featureCount * fieldCount * latentDim));

    _norm = norm;
    FieldCount = fieldCount;
    FeatureCount = featureCount;
    LatentDim = latentDim;
    _linearWeights = linearWeights;

    // Repack the unaligned latent vectors into an aligned buffer, zero-padding each
    // per-(feature, field) vector from LatentDim up to LatentDimAligned entries.
    _latentWeightsAligned = new AlignedArray(FeatureCount * FieldCount * LatentDimAligned, 16);
    for (int j = 0; j < FeatureCount; j++)
    {
        for (int f = 0; f < FieldCount; f++)
        {
            int index = j * FieldCount * LatentDim + f * LatentDim;
            int indexAligned = j * FieldCount * LatentDimAligned + f * LatentDimAligned;
            for (int k = 0; k < LatentDimAligned; k++)
            {
                _latentWeightsAligned[indexAligned + k] = k < LatentDim ? latentWeights[index + k] : 0;
            }
        }
    }
}