/// <summary>
/// Serializes this transformer: the raw ONNX model bytes as a sub-stream,
/// followed by the input and output column names in the main stream.
/// </summary>
private protected override void SaveModel(ModelSaveContext ctx)
{
    Host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // The ONNX model itself lives in its own binary sub-stream.
    ctx.SaveBinaryStream("OnnxModel", w => w.WriteByteArray(Model.ToByteArray()));

    // Input column names: count, then each name as a string id.
    Host.CheckNonEmpty(Inputs, nameof(Inputs));
    ctx.Writer.Write(Inputs.Length);
    for (int i = 0; i < Inputs.Length; i++)
        ctx.SaveNonEmptyString(Inputs[i]);

    // Output column names: same layout as the inputs.
    Host.CheckNonEmpty(Outputs, nameof(Outputs));
    ctx.Writer.Write(Outputs.Length);
    for (int i = 0; i < Outputs.Length; i++)
        ctx.SaveNonEmptyString(Outputs[i]);
}
/// <summary>
/// Serializes this transform: the TensorFlow graph definition as a binary
/// sub-stream, then the input column names and the single output column name.
/// </summary>
public void Save(ModelSaveContext ctx)
{
    _host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // TFBuffer wraps a native TensorFlow buffer; dispose it deterministically
    // once the graph bytes have been copied into managed memory, instead of
    // leaking the native allocation until finalization.
    byte[] graphBytes;
    using (var buffer = new TFBuffer())
    {
        _session.Graph.ToGraphDef(buffer);
        graphBytes = buffer.ToArray();
    }
    ctx.SaveBinaryStream("TFModel", w => { w.WriteByteArray(graphBytes); });

    // Input column names: count, then each name as a string id.
    Contracts.AssertNonEmpty(_inputColNames);
    ctx.Writer.Write(_inputColNames.Length);
    foreach (var colName in _inputColNames)
    {
        ctx.SaveNonEmptyString(colName);
    }

    // A single output column name (no count written, by format).
    ctx.SaveNonEmptyString(_outputColName);
}
/// <summary>
/// Serializes the optional-column schema: an IDV snapshot of the input schema
/// as a sub-stream, then per-column names and type descriptions.
/// </summary>
public void Save(IHostEnvironment env, ModelSaveContext ctx)
{
    Contracts.AssertValue(ctx);

    // *** Binary format ***
    // Schema of the data view containing the optional columns
    // int: number of added columns
    // for each added column
    //   int: id of output column name
    //   ColumnType: the type of the column

    // Persist an empty (zero-row) view so only the schema is captured.
    var noRows = new EmptyDataView(env, _inputWithOptionalColumn);
    var saverArgs = new BinarySaver.Arguments();
    saverArgs.Silent = true;
    var saver = new BinarySaver(env, saverArgs);
    using (var strm = new MemoryStream())
    {
        saver.SaveData(strm, noRows, _srcColsWithOptionalColumn);
        ctx.SaveBinaryStream("Schema.idv", w => w.WriteByteArray(strm.ToArray()));
    }

    int size = InfoCount;
    ctx.Writer.Write(size);

    saver = new BinarySaver(env, new BinarySaver.Arguments());
    for (int i = 0; i < size; i++)
    {
        ctx.SaveNonEmptyString(GetColumnNameCore(i));
        var columnType = ColumnTypes[i];
        int written;
        // Fail loudly if the type description cannot be written: silently
        // ignoring the failure (as before) would emit a truncated/corrupt
        // model that only surfaces as an error at load time.
        Contracts.Check(
            saver.TryWriteTypeDescription(ctx.Writer.BaseStream, columnType, out written),
            "Failed to serialize column type");
    }
}
/// <summary>
/// Serializes this transform: the TensorFlow graph definition as a binary
/// sub-stream, then the input and output column names.
/// </summary>
public void Save(ModelSaveContext ctx)
{
    _host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // *** Binary format ***
    // stream: tensorFlow model.
    // int: number of input columns
    // for each input column
    //   int: id of int column name
    // int: number of output columns
    // for each output column
    //   int: id of output column name

    // TFBuffer wraps a native TensorFlow buffer; dispose it deterministically
    // once the graph bytes have been copied into managed memory, instead of
    // leaking the native allocation until finalization.
    byte[] graphBytes;
    using (var buffer = new TFBuffer())
    {
        Session.Graph.ToGraphDef(buffer);
        graphBytes = buffer.ToArray();
    }
    ctx.SaveBinaryStream("TFModel", w => { w.WriteByteArray(graphBytes); });

    _host.AssertNonEmpty(Inputs);
    ctx.Writer.Write(Inputs.Length);
    foreach (var colName in Inputs)
    {
        ctx.SaveNonEmptyString(colName);
    }

    _host.AssertNonEmpty(Outputs);
    ctx.Writer.Write(Outputs.Length);
    foreach (var colName in Outputs)
    {
        ctx.SaveNonEmptyString(colName);
    }
}
/// <summary>
/// Serializes this transformer: the ONNX model file bytes as a sub-stream,
/// the input/output column names, any user-supplied shape overrides, and
/// the protobuf recursion limit.
/// </summary>
private protected override void SaveModel(ModelSaveContext ctx)
{
    Host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // Re-read the backing model file and store its bytes as a sub-stream.
    ctx.SaveBinaryStream("OnnxModel", w => w.WriteByteArray(File.ReadAllBytes(Model.ModelStream.Name)));

    // Input column names: count, then each name as a string id.
    Host.CheckNonEmpty(Inputs, nameof(Inputs));
    ctx.Writer.Write(Inputs.Length);
    for (int i = 0; i < Inputs.Length; i++)
        ctx.SaveNonEmptyString(Inputs[i]);

    // Output column names: same layout as the inputs.
    Host.CheckNonEmpty(Outputs, nameof(Outputs));
    ctx.Writer.Write(Outputs.Length);
    for (int i = 0; i < Outputs.Length; i++)
        ctx.SaveNonEmptyString(Outputs[i]);

    // Save custom-provided shapes. Those shapes overwrite shapes loaded from the ONNX model file.
    var customShapes = _options.CustomShapeInfos;
    int customShapeCount = customShapes?.Length ?? 0;
    ctx.Writer.Write(customShapeCount);
    for (int i = 0; i < customShapeCount; ++i)
    {
        var shapeInfo = customShapes[i];
        ctx.SaveNonEmptyString(shapeInfo.Name);
        ctx.Writer.WriteIntArray(shapeInfo.Shape);
    }

    ctx.Writer.Write(_options.RecursionLimit);
}
/// <summary>
/// Serializes the multiclass linear model: biases, then the class-by-feature
/// weight matrix (dense row-major, or CSR when any row is sparse), followed by
/// optional label names and model statistics in separate sub-streams.
/// </summary>
protected override void SaveCore(ModelSaveContext ctx)
{
    base.SaveCore(ctx);
    ctx.SetVersionInfo(GetVersionInfo());
    Host.Assert(_biases.Length == _numClasses);
    Host.Assert(_biases.Length == _weights.Length);
#if DEBUG
    foreach (var fw in _weights)
    {
        Host.Assert(fw.Length == _numFeatures);
    }
#endif
    // *** Binary format ***
    // int: number of features
    // int: number of classes = number of biases
    // float[]: biases
    // (weight matrix, in CSR if sparse)
    // (see https://netlib.org/linalg/html_templates/node91.html#SECTION00931100000000000000)
    // int: number of row start indices (_numClasses + 1 if sparse, 0 if dense)
    // int[]: row start indices
    // int: total number of column indices (0 if dense)
    // int[]: column index of each non-zero weight
    // int: total number of non-zero weights (same as number of column indices if sparse, num of classes * num of features if dense)
    // float[]: non-zero weights
    // bool: whether label names are present
    // int[]: Id of label names (optional, in a separate stream)
    // LinearModelStatistics: model statistics (optional, in a separate stream)
    ctx.Writer.Write(_numFeatures);
    ctx.Writer.Write(_numClasses);
    ctx.Writer.WriteFloatsNoCount(_biases, _numClasses);
    // _weights == _weighsDense means we checked that all vectors in _weights
    // are actually dense, and so we assigned the same object, or it came dense
    // from deserialization.
    if (_weights == _weightsDense)
    {
        // Dense layout: no row starts and no column indices are stored, only
        // the raw _numClasses x _numFeatures matrix in row-major order.
        ctx.Writer.Write(0); // Number of starts.
        ctx.Writer.Write(0); // Number of indices.
        ctx.Writer.Write(_numFeatures * _weights.Length);
        foreach (var fv in _weights)
        {
            Host.Assert(fv.Length == _numFeatures);
            ctx.Writer.WriteFloatsNoCount(fv.Values, _numFeatures);
        }
    }
    else
    {
        // Sparse (CSR) layout: cumulative row starts, then column indices,
        // then values. The write order here is format-defining.
        // Number of starts.
        ctx.Writer.Write(_numClasses + 1);
        // Starts always starts with 0.
        int numIndices = 0;
        ctx.Writer.Write(numIndices);
        // Each subsequent start is the running total of non-zeros so far.
        for (int i = 0; i < _weights.Length; i++)
        {
            // REVIEW: Assuming the presence of *any* zero justifies
            // writing in sparse format seems stupid, but might be difficult
            // to change without changing the format since the presence of
            // any sparse vector means we're writing indices anyway. Revisit.
            // This is actually a bug waiting to happen: sparse/dense vectors
            // can have different dot products even if they are logically the
            // same vector.
            numIndices += NonZeroCount(ref _weights[i]);
            ctx.Writer.Write(numIndices);
        }
        // Total number of column indices, then the indices themselves.
        ctx.Writer.Write(numIndices);
        {
            // just scoping the count so we can use another further down
            int count = 0;
            foreach (var fw in _weights)
            {
                if (fw.IsDense)
                {
                    // Dense rows are emitted sparsely: only positions holding
                    // a non-zero value get an index written.
                    for (int i = 0; i < fw.Length; i++)
                    {
                        if (fw.Values[i] != 0)
                        {
                            ctx.Writer.Write(i);
                            count++;
                        }
                    }
                }
                else
                {
                    ctx.Writer.WriteIntsNoCount(fw.Indices, fw.Count);
                    count += fw.Count;
                }
            }
            Host.Assert(count == numIndices);
        }
        // Total number of non-zero values, then the values themselves; these
        // must line up one-to-one with the column indices written above.
        ctx.Writer.Write(numIndices);
        {
            int count = 0;
            foreach (var fw in _weights)
            {
                if (fw.IsDense)
                {
                    for (int i = 0; i < fw.Length; i++)
                    {
                        if (fw.Values[i] != 0)
                        {
                            ctx.Writer.Write(fw.Values[i]);
                            count++;
                        }
                    }
                }
                else
                {
                    ctx.Writer.WriteFloatsNoCount(fw.Values, fw.Count);
                    count += fw.Count;
                }
            }
            Host.Assert(count == numIndices);
        }
    }
    Contracts.AssertValueOrNull(_labelNames);
    if (_labelNames != null)
    {
        // Label names go into their own binary sub-stream.
        ctx.SaveBinaryStream(LabelNamesSubModelFilename, w => SaveLabelNames(ctx, w));
    }
    Contracts.AssertValueOrNull(_stats);
    if (_stats != null)
    {
        // Model statistics are saved as a nested model in a sub-directory of the repository.
        using (var statsCtx = new ModelSaveContext(ctx.Repository,
            Path.Combine(ctx.Directory ?? "", ModelStatsSubModelFilename), ModelLoadContext.ModelStreamName))
        {
            _stats.Save(statsCtx);
            statsCtx.Done();
        }
    }
}
/// <summary>
/// Serializes this transform. Frozen models store the graph definition bytes;
/// SavedModel directories store every file (relative path, length, contents).
/// Input and output column names follow in the main stream.
/// </summary>
public void Save(ModelSaveContext ctx)
{
    _host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // *** Binary format ***
    // byte: indicator for frozen models
    // stream: tensorFlow model.
    // int: number of input columns
    // for each input column
    //   int: id of int column name
    // int: number of output columns
    // for each output column
    //   int: id of output column name
    var isFrozen = string.IsNullOrEmpty(_savedModelPath);
    ctx.Writer.WriteBoolByte(isFrozen);
    if (isFrozen)
    {
        // TFBuffer wraps a native TensorFlow buffer; dispose it deterministically
        // once the graph bytes have been copied into managed memory, instead of
        // leaking the native allocation until finalization.
        byte[] graphBytes;
        using (var buffer = new TFBuffer())
        {
            Session.Graph.ToGraphDef(buffer);
            graphBytes = buffer.ToArray();
        }
        ctx.SaveBinaryStream("TFModel", w => { w.WriteByteArray(graphBytes); });
    }
    else
    {
        ctx.SaveBinaryStream("TFSavedModel", w =>
        {
            string[] modelFilePaths = Directory.GetFiles(_savedModelPath, "*", SearchOption.AllDirectories);
            w.Write(modelFilePaths.Length);
            foreach (var fullPath in modelFilePaths)
            {
                // Path relative to the SavedModel root; assumes _savedModelPath
                // carries no trailing separator — TODO confirm against callers.
                var relativePath = fullPath.Substring(_savedModelPath.Length + 1);
                w.Write(relativePath);

                // Open read-only with shared read: FileMode.Open alone requests
                // ReadWrite access and would fail on read-only model files even
                // though we only ever read here.
                using (var fs = new FileStream(fullPath, FileMode.Open, FileAccess.Read, FileShare.Read))
                {
                    long fileLength = fs.Length;
                    w.Write(fileLength);
                    long actualWritten = fs.CopyRange(w.BaseStream, fileLength);
                    _host.Assert(actualWritten == fileLength);
                }
            }
        });
    }

    _host.AssertNonEmpty(Inputs);
    ctx.Writer.Write(Inputs.Length);
    foreach (var colName in Inputs)
    {
        ctx.SaveNonEmptyString(colName);
    }

    _host.AssertNonEmpty(Outputs);
    ctx.Writer.Write(Outputs.Length);
    foreach (var colName in Outputs)
    {
        ctx.SaveNonEmptyString(colName);
    }
}