/// <summary>
/// Serializes the loader and transform chain into <paramref name="ctx"/>.
/// The loader is saved into a "Loader" sub-directory via <paramref name="loaderSaveAction"/>;
/// each transform is saved into its own numbered sub-directory along with its tag and args string.
/// </summary>
/// <param name="ctx">The save context to write into. Must not be null.</param>
/// <param name="loaderSaveAction">Callback that saves the loader model. Must not be null.</param>
/// <param name="transforms">The transform chain; null is treated as an empty chain.</param>
private static void SaveCore(ModelSaveContext ctx, Action<ModelSaveContext> loaderSaveAction, TransformEx[] transforms)
{
    Contracts.AssertValue(ctx);
    Contracts.AssertValue(loaderSaveAction);
    Contracts.AssertValueOrNull(transforms);

    // *** Binary format ***
    // int: sizeof(Float)
    // int: number of transforms
    // foreach transform: (starting from version VersionAddedTags)
    //   string: tag
    //   string: args string

    ctx.Writer.Write(sizeof(Float));
    // BUGFIX: the contract above explicitly permits a null transforms array, but the
    // original code dereferenced it unconditionally, throwing NullReferenceException.
    // Treat null the same as an empty chain (zero transforms).
    int transformCount = transforms == null ? 0 : transforms.Length;
    ctx.Writer.Write(transformCount);

    // The loader model lives in its own sub-directory of the repository.
    using (var loaderCtx = new ModelSaveContext(ctx.Repository, Path.Combine(ctx.Directory ?? "", "Loader"), ModelLoadContext.ModelStreamName))
    {
        loaderSaveAction(loaderCtx);
        loaderCtx.Done();
    }

    for (int i = 0; i < transformCount; i++)
    {
        // Each transform gets a numbered directory (e.g. "Transform_000").
        var dirName = string.Format(TransformDirTemplate, i);
        ctx.SaveModel(transforms[i].Transform, dirName);
        Contracts.AssertNonEmpty(transforms[i].Tag);
        ctx.SaveNonEmptyString(transforms[i].Tag);
        ctx.SaveStringOrNull(transforms[i].ArgsString);
    }
}
/// <summary>
/// Saves this model: the base-class payload first, then (when present) the model
/// statistics as a separate sub-model stream, and finally the version info.
/// </summary>
protected override void SaveCore(ModelSaveContext ctx)
{
    // *** Binary format ***
    // (Base class)
    // LinearModelStatistics: model statistics (optional, in a separate stream)

    base.SaveCore(ctx);

    Contracts.AssertValueOrNull(_stats);
    if (_stats != null)
    {
        // Statistics are optional; they go into their own sub-directory of the repository.
        var statsDir = Path.Combine(ctx.Directory ?? "", ModelStatsSubModelFilename);
        using (var statsSaveCtx = new ModelSaveContext(ctx.Repository, statsDir, ModelLoadContext.ModelStreamName))
        {
            _stats.Save(statsSaveCtx);
            statsSaveCtx.Done();
        }
    }

    // Stamp the version info last, overriding whatever the base class set.
    ctx.SetVersionInfo(GetVersionInfo());
}
/// <summary>
/// Saves the multiclass linear model: feature/class counts, biases, the weight matrix
/// (dense, or CSR-compressed when any per-class weight vector is sparse), plus the
/// optional label names and model statistics sub-models.
/// </summary>
protected override void SaveCore(ModelSaveContext ctx)
{
    base.SaveCore(ctx);
    ctx.SetVersionInfo(GetVersionInfo());

    Host.Assert(_biases.Length == _numClasses);
    Host.Assert(_biases.Length == _weights.Length);
#if DEBUG
    foreach (var fw in _weights)
    {
        Host.Assert(fw.Length == _numFeatures);
    }
#endif
    // *** Binary format ***
    // int: number of features
    // int: number of classes = number of biases
    // float[]: biases
    // (weight matrix, in CSR if sparse)
    // (see https://netlib.org/linalg/html_templates/node91.html#SECTION00931100000000000000)
    // int: number of row start indices (_numClasses + 1 if sparse, 0 if dense)
    // int[]: row start indices
    // int: total number of column indices (0 if dense)
    // int[]: column index of each non-zero weight
    // int: total number of non-zero weights (same as number of column indices if sparse, num of classes * num of features if dense)
    // float[]: non-zero weights
    // bool: whether label names are present
    // int[]: Id of label names (optional, in a separate stream)
    // LinearModelStatistics: model statistics (optional, in a separate stream)

    ctx.Writer.Write(_numFeatures);
    ctx.Writer.Write(_numClasses);
    ctx.Writer.WriteFloatsNoCount(_biases, _numClasses);
    // _weights == _weighsDense means we checked that all vectors in _weights
    // are actually dense, and so we assigned the same object, or it came dense
    // from deserialization.
    if (_weights == _weightsDense)
    {
        // Dense path: no starts array, no column indices; just the raw
        // numClasses x numFeatures weight values in row-major order.
        ctx.Writer.Write(0); // Number of starts.
        ctx.Writer.Write(0); // Number of indices.
        ctx.Writer.Write(_numFeatures * _weights.Length);
        foreach (var fv in _weights)
        {
            Host.Assert(fv.Length == _numFeatures);
            ctx.Writer.WriteFloatsNoCount(fv.Values, _numFeatures);
        }
    }
    else
    {
        // Sparse (CSR) path: one row per class.
        // Number of starts.
        ctx.Writer.Write(_numClasses + 1);
        // Starts always starts with 0.
        int numIndices = 0;
        ctx.Writer.Write(numIndices);
        // Starts array: cumulative count of non-zero entries after each row, so
        // row i's entries occupy [starts[i], starts[i+1]) in the index/value arrays.
        for (int i = 0; i < _weights.Length; i++)
        {
            // REVIEW: Assuming the presence of *any* zero justifies
            // writing in sparse format seems stupid, but might be difficult
            // to change without changing the format since the presence of
            // any sparse vector means we're writing indices anyway. Revisit.
            // This is actually a bug waiting to happen: sparse/dense vectors
            // can have different dot products even if they are logically the
            // same vector.
            numIndices += NonZeroCount(ref _weights[i]);
            ctx.Writer.Write(numIndices);
        }
        // Total number of column indices, then the indices themselves.
        ctx.Writer.Write(numIndices);
        {
            // just scoping the count so we can use another further down
            int count = 0;
            foreach (var fw in _weights)
            {
                if (fw.IsDense)
                {
                    // Dense row inside a sparse matrix: emit an index only for
                    // non-zero entries, matching NonZeroCount above.
                    for (int i = 0; i < fw.Length; i++)
                    {
                        if (fw.Values[i] != 0)
                        {
                            ctx.Writer.Write(i);
                            count++;
                        }
                    }
                }
                else
                {
                    ctx.Writer.WriteIntsNoCount(fw.Indices, fw.Count);
                    count += fw.Count;
                }
            }
            // The number of indices written must agree with the starts array.
            Host.Assert(count == numIndices);
        }
        // Total number of non-zero weights (same as the index count), then the values,
        // in the same order as the indices above.
        ctx.Writer.Write(numIndices);
        {
            int count = 0;
            foreach (var fw in _weights)
            {
                if (fw.IsDense)
                {
                    // Skip zeros so values stay aligned one-to-one with the indices.
                    for (int i = 0; i < fw.Length; i++)
                    {
                        if (fw.Values[i] != 0)
                        {
                            ctx.Writer.Write(fw.Values[i]);
                            count++;
                        }
                    }
                }
                else
                {
                    ctx.Writer.WriteFloatsNoCount(fw.Values, fw.Count);
                    count += fw.Count;
                }
            }
            Host.Assert(count == numIndices);
        }
    }

    Contracts.AssertValueOrNull(_labelNames);
    if (_labelNames != null)
    {
        // Label names go into their own binary stream within the model directory.
        ctx.SaveBinaryStream(LabelNamesSubModelFilename, w => SaveLabelNames(ctx, w));
    }

    Contracts.AssertValueOrNull(_stats);
    if (_stats != null)
    {
        // Optional statistics sub-model, saved in a separate sub-directory.
        using (var statsCtx = new ModelSaveContext(ctx.Repository, Path.Combine(ctx.Directory ?? "", ModelStatsSubModelFilename), ModelLoadContext.ModelStreamName))
        {
            _stats.Save(statsCtx);
            statsCtx.Done();
        }
    }
}