/// <summary>
/// Persists <paramref name="result"/> into a single zip archive at <paramref name="path"/>,
/// containing the header XML, the ARFF data set and the serialized model as separate entries.
/// Recomputes <c>Header.AverageVectorSize</c> first when the data set has documents.
/// </summary>
/// <param name="result">Training results to persist; must not be null.</param>
/// <param name="path">Target zip file path (overwritten if it exists); must not be null or empty.</param>
public static void SaveCompressed(this TrainingResults result, string path)
{
    Guard.NotNull(() => result, result);
    Guard.NotNullOrEmpty(() => path, path);
    log.Debug("SaveCompressed: {0}", path);

    if (result.DataSet.TotalDocuments > 0)
    {
        result.Header.AverageVectorSize = result.DataSet.Documents.Average(item => item.Count);
    }

    using (var fileStream = new FileStream(path, FileMode.Create))
    using (var archive = new ZipArchive(fileStream, ZipArchiveMode.Create))
    {
        // In Create mode only one entry stream may be open at a time,
        // so each entry is fully written and disposed before the next.
        using (var headerStream = archive.CreateEntry(headerFile).Open())
        {
            result.Header.XmlSerialize().Save(headerStream);
        }

        using (var arffStream = archive.CreateEntry(arffFile).Open())
        {
            SaveArff(result.DataSet, arffStream);
        }

        using (var modelStream = archive.CreateEntry(modelFile).Open())
        {
            result.Model.Write(modelStream);
        }
    }
}
/// <summary>
/// Persists <paramref name="result"/> as three separate files (header XML, ARFF data set,
/// serialized model) inside the directory <paramref name="path"/>, creating it if needed.
/// Recomputes <c>Header.AverageVectorSize</c> first when the data set has documents.
/// </summary>
/// <param name="result">Training results to persist; must not be null.</param>
/// <param name="path">Target directory; must not be null or empty. Existing files are overwritten.</param>
public static void Save(this TrainingResults result, string path)
{
    Guard.NotNull(() => result, result);
    Guard.NotNullOrEmpty(() => path, path);
    log.Debug("Save: {0}", path);
    path.EnsureDirectoryExistence();

    if (result.DataSet.TotalDocuments > 0)
    {
        result.Header.AverageVectorSize = result.DataSet.Documents.Average(item => item.Count);
    }

    // Resolve all three destinations up front, then write each artifact in turn.
    var headerPath = Path.Combine(path, headerFile);
    var arffPath = Path.Combine(path, arffFile);
    var modelPath = Path.Combine(path, modelFile);

    result.Header.XmlSerialize().Save(headerPath);

    using (var arffStream = new FileStream(arffPath, FileMode.Create))
    {
        SaveArff(result.DataSet, arffStream);
    }

    using (var modelStream = new FileStream(modelPath, FileMode.Create))
    {
        result.Model.Write(modelStream);
    }
}