private FieldAwareFactorizationMachineModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, LoaderSignature)
        {
            Host.AssertValue(ctx);

            // *** Binary format ***
            // bool: whether to normalize feature vectors
            // int: number of fields
            // int: number of features
            // int: latent dimension
            // float[]: linear coefficients
            // float[]: latent representation of features

            // Read the serialized values in the exact order they were written.
            var norm = ctx.Reader.ReadBoolean();

            var fieldCount = ctx.Reader.ReadInt32();
            Host.CheckDecode(fieldCount > 0);

            var featureCount = ctx.Reader.ReadInt32();
            Host.CheckDecode(featureCount > 0);

            var latentDim = ctx.Reader.ReadInt32();
            Host.CheckDecode(latentDim > 0);

            // Pad the latent dimension up to the SIMD-friendly aligned length, and make sure
            // the padded weight buffer still fits in a single array.
            LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latentDim);
            Host.Check(checked (featureCount * fieldCount * LatentDimAligned) <= Utils.ArrayMaxSize, "Latent dimension too large");

            var linearWeights = ctx.Reader.ReadFloatArray();
            Host.CheckDecode(Utils.Size(linearWeights) == featureCount);

            // The serialized latent weights are unaligned: latentDim entries per latent vector.
            var latentWeights = ctx.Reader.ReadFloatArray();
            Host.CheckDecode(Utils.Size(latentWeights) == featureCount * fieldCount * latentDim);

            _norm = norm;
            FieldCount = fieldCount;
            FeatureCount = featureCount;
            LatentDimension = latentDim;
            _linearWeights = linearWeights;

            // Expand the unaligned latent weights into the aligned buffer: each latent vector
            // v_{feature, field} is copied and then zero-padded out to LatentDimAligned entries.
            _latentWeightsAligned = new AlignedArray(FeatureCount * FieldCount * LatentDimAligned, 16);
            for (int feature = 0; feature < FeatureCount; feature++)
            {
                for (int field = 0; field < FieldCount; field++)
                {
                    int src = (feature * FieldCount + field) * LatentDimension;
                    int dst = (feature * FieldCount + field) * LatentDimAligned;
                    for (int k = 0; k < LatentDimension; k++)
                        _latentWeightsAligned[dst + k] = latentWeights[src + k];
                    for (int k = LatentDimension; k < LatentDimAligned; k++)
                        _latentWeightsAligned[dst + k] = 0;
                }
            }
        }
        internal FieldAwareFactorizationMachineModelParameters(IHostEnvironment env, bool norm, int fieldCount, int featureCount, int latentDim,
                                                               float[] linearWeights, AlignedArray latentWeightsAligned) : base(env, LoaderSignature)
        {
            // Sanity-check the caller-provided dimensions before wiring up the fields.
            Host.Assert(fieldCount > 0);
            Host.Assert(featureCount > 0);
            Host.Assert(latentDim > 0);
            Host.Assert(Utils.Size(linearWeights) == featureCount);

            // The supplied latent buffer must already be padded to the aligned latent length,
            // i.e. featureCount x fieldCount x LatentDimAligned entries.
            LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latentDim);
            Host.Assert(latentWeightsAligned.Size == checked (featureCount * fieldCount * LatentDimAligned));

            // Already-aligned input: store everything as-is, no copying or padding needed.
            _norm = norm;
            FieldCount = fieldCount;
            FeatureCount = featureCount;
            LatentDimension = latentDim;
            _linearWeights = linearWeights;
            _latentWeightsAligned = latentWeightsAligned;
        }
 /// <summary>
 /// Initializes the instance. Shared between the two constructors.
 /// REVIEW: Once the legacy constructor goes away, this can move to the only constructor and most of the fields can be back to readonly.
 /// </summary>
 /// <param name="env">The host environment (not read here; kept for parity with the constructors that share this initializer).</param>
 /// <param name="options">User-supplied trainer options; validated and then copied into the private fields.</param>
 private void Initialize(IHostEnvironment env, Options options)
 {
     // Reject invalid hyper-parameters up front with user-facing argument errors.
     _host.CheckUserArg(options.LatentDimension > 0, nameof(options.LatentDimension), "Must be positive");
     _host.CheckUserArg(options.LambdaLinear >= 0, nameof(options.LambdaLinear), "Must be non-negative");
     _host.CheckUserArg(options.LambdaLatent >= 0, nameof(options.LambdaLatent), "Must be non-negative");
     _host.CheckUserArg(options.LearningRate > 0, nameof(options.LearningRate), "Must be positive");
     _host.CheckUserArg(options.NumberOfIterations >= 0, nameof(options.NumberOfIterations), "Must be non-negative");

     // Snapshot the validated options into fields; the aligned latent length is derived
     // from the requested latent dimension.
     _latentDim = options.LatentDimension;
     _latentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(_latentDim);
     _lambdaLinear = options.LambdaLinear;
     _lambdaLatent = options.LambdaLatent;
     _learningRate = options.LearningRate;
     _numIterations = options.NumberOfIterations;
     _norm = options.NormalizeFeatures;
     _shuffle = options.Shuffle;
     _verbose = options.Verbose;
     _radius = options.Radius;
 }
        /// <summary>
        /// Initialize model parameters with a trained model.
        /// </summary>
        /// <param name="env">The host environment</param>
        /// <param name="norm">True if user wants to normalize feature vector to unit length.</param>
        /// <param name="fieldCount">The number of fields, which is the symbol `m` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
        /// <param name="featureCount">The number of features, which is the symbol `n` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
        /// <param name="latentDim">The latent dimensions, which is the length of `v_{j, f}` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
        /// <param name="linearWeights">The linear coefficients of the features, which is the symbol `w` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf </param>
        /// <param name="latentWeights">Latent representation of each feature. Note that one feature may have <see cref="FieldCount"/> latent vectors
        /// and each latent vector contains <see cref="LatentDimension"/> values. In the f-th field, the j-th feature's latent vector, `v_{j, f}` in the doc
        /// https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf, starts at latentWeights[j * fieldCount * latentDim + f * latentDim].
        /// The k-th element in v_{j, f} is latentWeights[j * fieldCount * latentDim + f * latentDim + k]. The size of the array must be featureCount x fieldCount x latentDim.</param>
        internal FieldAwareFactorizationMachineModelParameters(IHostEnvironment env, bool norm, int fieldCount, int featureCount, int latentDim,
                                                               float[] linearWeights, float[] latentWeights) : base(env, LoaderSignature)
        {
            Host.Assert(fieldCount > 0);
            Host.Assert(featureCount > 0);
            Host.Assert(latentDim > 0);
            Host.Assert(Utils.Size(linearWeights) == featureCount);
            LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latentDim);
            // BUGFIX: the incoming latent weights are unaligned — each latent vector has exactly
            // latentDim entries (see the <param> doc above), and the copy loop below only reads
            // indices up to featureCount * fieldCount * latentDim. The previous assert compared
            // against LatentDimAligned, which rejected correctly-sized input whenever latentDim
            // was not already a multiple of the alignment.
            Host.Assert(Utils.Size(latentWeights) == checked (featureCount * fieldCount * latentDim));

            _norm           = norm;
            FieldCount      = fieldCount;
            FeatureCount    = featureCount;
            LatentDimension = latentDim;
            _linearWeights  = linearWeights;

            // Expand the unaligned latent weights into the aligned buffer, zero-padding each
            // latent vector from LatentDimension up to LatentDimAligned entries.
            _latentWeightsAligned = new AlignedArray(FeatureCount * FieldCount * LatentDimAligned, 16);

            for (int j = 0; j < FeatureCount; j++)
            {
                for (int f = 0; f < FieldCount; f++)
                {
                    // Start of v_{j, f} in the unaligned source and the aligned destination.
                    int index        = j * FieldCount * LatentDimension + f * LatentDimension;
                    int indexAligned = j * FieldCount * LatentDimAligned + f * LatentDimAligned;
                    for (int k = 0; k < LatentDimAligned; k++)
                    {
                        if (k < LatentDimension)
                        {
                            _latentWeightsAligned[indexAligned + k] = latentWeights[index + k];
                        }
                        else
                        {
                            _latentWeightsAligned[indexAligned + k] = 0;
                        }
                    }
                }
            }
        }