/// <summary>
/// Writes this object's state into the model save context: the prediction model,
/// an empty data view carrying the training schema, and the feature column name.
/// </summary>
protected virtual void SaveCore(ModelSaveContext ctx)
{
    // *** Binary format ***
    // model: prediction model.
    // stream: empty data view that contains train schema.
    // id of string: feature column.
    ctx.SaveModel(Model, DirModel);
    ctx.SaveBinaryStream(DirTransSchema, writer =>
    {
        using (var channel = Host.Start("Saving train schema"))
        {
            var schemaSaver = new BinarySaver(Host, new BinarySaver.Arguments { Silent = true });
            var emptyView = new EmptyDataView(Host, TrainSchema);
            DataSaverUtils.SaveDataView(channel, schemaSaver, emptyView, writer.BaseStream);
        }
    });
    ctx.SaveStringOrNull(FeatureColumn);
}
/// <summary>
/// Serializes the transform's configuration (window sizes, column names and the
/// output column type) into <paramref name="ctx"/>.
/// </summary>
public virtual void Save(ModelSaveContext ctx)
{
    Host.CheckValue(ctx, nameof(ctx));
    Host.Assert(InitialWindowSize >= 0);
    Host.Assert(WindowSize >= 0);

    // *** Binary format ***
    // int: _windowSize
    // int: _initialWindowSize
    // int (string ID): _inputColumnName
    // int (string ID): _outputColumnName
    // ColumnType: _transform.Schema.GetColumnType(0)
    ctx.Writer.Write(WindowSize);
    ctx.Writer.Write(InitialWindowSize);
    ctx.SaveNonEmptyString(InputColumnName);
    ctx.SaveNonEmptyString(OutputColumnName);

    var typeSaver = new BinarySaver(Host, new BinarySaver.Arguments());
    // FIX: the byte count is not part of the format, so discard it instead of
    // binding an unused local.
    // NOTE(review): the bool result of TryWriteTypeDescription is ignored; a failed
    // write would silently produce a corrupt stream — consider checking it.
    typeSaver.TryWriteTypeDescription(ctx.Writer.BaseStream, OutputColumnType, out _);
}
/// <summary>
/// Saves <paramref name="idv"/> to <paramref name="path"/>: text output for
/// .csv/.tsv/.txt extensions, otherwise the binary IDV format plus a sidecar
/// ".schema" file. A path equal to STDNULL suppresses saving entirely.
/// </summary>
private static void SaveIdvToFile(IDataView idv, string path, IHost host)
{
    if (path == STDNULL)
    {
        return;
    }
    var extension = Path.GetExtension(path);
    IDataSaver saver;
    if (extension != ".csv" && extension != ".tsv" && extension != ".txt")
    {
        // NOTE(review): the extension comparison is case-sensitive, so ".CSV" falls
        // into this binary branch — confirm whether that is intended.
        saver = new BinarySaver(host, new BinarySaver.Arguments());
        var schemaFilePath = Path.GetDirectoryName(path) + Path.DirectorySeparatorChar
            + Path.GetFileNameWithoutExtension(path) + ".schema";
        SaveIdvSchemaToFile(idv, schemaFilePath, host);
    }
    else
    {
        var saverArgs = new TextSaver.Arguments
        {
            OutputHeader = true,
            OutputSchema = true,
            Dense = true,
            Separator = extension == ".csv" ? "comma" : "tab"
        };
        saver = new TextSaver(host, saverArgs);
    }
    // BUGFIX: File.OpenWrite does not truncate an existing file, so writing fewer
    // bytes than before would leave stale trailing data; File.Create truncates.
    using (var fs = File.Create(path))
    {
        saver.SaveData(fs, idv,
            Utils.GetIdentityPermutation(idv.Schema.Count)
                .Where(x => !idv.Schema[x].IsHidden && saver.IsColumnSavable(idv.Schema[x].Type))
                .ToArray());
    }
}
/// <summary>
/// Runs an interactive double-elimination tournament on the console, either resumed
/// from a saved file or created from freshly entered participant names, and prints
/// the final upper bracket when the tournament ends.
/// </summary>
public static void PlayDoubleElimination(bool isFromSavedFile, PrintBracket print)
{
    DoubleEliminationTournament tournament;
    if (isFromSavedFile)
    {
        tournament = BinarySaver.LoadDoubleFromBinnary();
    }
    else
    {
        var names = DataInput.InputNames(DataInput.InputAmount(), _maxNameLength);
        tournament = new DoubleEliminationTournament(names);
    }

    // Clears the screen and prints the current upper bracket.
    void ShowUpperBracket()
    {
        Console.Clear();
        Console.WriteLine("----Upper Bracket----");
        print(tournament.GetBracket());
    }

    while (!tournament.EndOfTheGame())
    {
        ShowUpperBracket();
        Console.WriteLine("----Lower Bracket----");
        print(tournament.GetLowerBracket());
        var side = DataInput.InputWinner(tournament.GetPlayingParticipants());
        tournament.PlayGame(side);
    }

    ShowUpperBracket();
    Console.ReadLine();
}
/// <summary>
/// Persists all fields of every BlackBoard board flagged "IsPersistent" as a flat
/// array of (board, field, value) triples, where bool and int values are encoded as
/// strings with a "__BOOLEAN:" / "__INT32:" prefix so Load() can decode them.
/// </summary>
public static void Save()
{
    // Get all IsPersistent board names.
    string[] allBoardsContaining = BlackBoard.GetAllBoardsContaining("IsPersistent");

    // Flat list of triples: <string>board, <string>field, <string>value.
    List<object> list = new List<object>();
    for (int i = 0; i < allBoardsContaining.Length; i++)
    {
        string[] fields = BlackBoard.GetAllFieldsFromBoard(allBoardsContaining[i]);
        for (int j = 0; j < fields.Length; j++)
        {
            object value = BlackBoard.Read<object>(allBoardsContaining[i], fields[j]);

            // Encode the value first; only supported types produce an entry.
            object encoded = null;
            if (value is bool b)
            {
                encoded = "__BOOLEAN:" + (b ? "true" : "false");
            }
            else if (value is int)
            {
                encoded = "__INT32:" + value.ToString();
            }
            else if (value is string)
            {
                encoded = value;
            }

            // BUGFIX: previously board/field were appended even when the value type
            // was unsupported (e.g. float), leaving a dangling pair that corrupts
            // the triple layout Load() depends on. Only write complete triples.
            if (encoded != null)
            {
                list.Add(allBoardsContaining[i]);
                list.Add(fields[j]);
                list.Add(encoded);
            }
        }
    }
    BinarySaver.WriteBinFile(list.ToArray());
}
/// <summary>
/// Serializes the forecasting transform's configuration (window sizes, column names,
/// confidence-interval column names and the output column type) into <paramref name="ctx"/>.
/// </summary>
private protected virtual void SaveModel(ModelSaveContext ctx)
{
    Host.CheckValue(ctx, nameof(ctx));
    Host.Assert(InitialWindowSize >= 0);
    Host.Assert(WindowSize >= 0);

    // *** Binary format ***
    // int: _windowSize
    // int: _initialWindowSize
    // int (string ID): _sourceColumnName
    // int (string ID): _outputColumnName
    // string: confidence interval min output column name (empty string when unset)
    // string: confidence interval max output column name (empty string when unset)
    // ColumnType: _transform.Schema.GetColumnType(0)
    // (FIX: the format comment previously omitted the two confidence-interval
    // strings that are actually written below.)
    ctx.Writer.Write(WindowSize);
    ctx.Writer.Write(InitialWindowSize);
    ctx.SaveNonEmptyString(InputColumnName);
    ctx.SaveNonEmptyString(OutputColumnName);
    ctx.Writer.Write(ForecastingConfidenceIntervalMinOutputColumnName ?? string.Empty);
    ctx.Writer.Write(ForecastingConfidenceIntervalMaxOutputColumnName ?? string.Empty);

    var typeSaver = new BinarySaver(Host, new BinarySaver.Arguments());
    // FIX: the byte count is not part of the format, so discard it instead of
    // binding an unused local.
    // NOTE(review): the bool result of TryWriteTypeDescription is ignored; a failed
    // write would silently produce a corrupt stream — consider checking it.
    typeSaver.TryWriteTypeDescription(ctx.Writer.BaseStream, OutputColumnType, out _);
}
// Featurizes the sentiment training data, round-trips it through a binary .idv
// file on disk, then trains an SDCA binary classifier on the reloaded data.
void New_FileBasedSavingOfData()
{
    var mlContext = new MLContext(seed: 1, conc: 1);
    var source = new MultiFileSource(GetDataPath(TestDatasets.Sentiment.trainFilename));
    var estimator = mlContext.Data.TextReader(MakeSentimentTextLoaderArgs())
        .Append(mlContext.Transforms.Text.FeaturizeText("SentimentText", "Features"));
    var trainData = estimator.Fit(source).Read(source);

    var path = DeleteOutputPath("i.idv");
    using (var file = File.Create(path))
    {
        var saver = new BinarySaver(mlContext, new BinarySaver.Arguments());
        using (var ch = ((IHostEnvironment)mlContext).Start("SaveData"))
            DataSaverUtils.SaveDataView(ch, saver, trainData, file);
    }

    var trainer = mlContext.BinaryClassification.Trainers.StochasticDualCoordinateAscent(
        "Label", "Features", advancedSettings: s => s.NumThreads = 1);
    var loadedTrainData = new BinaryLoader(mlContext, new BinaryLoader.Arguments(), new MultiFileSource(path));

    // Train.
    var model = trainer.Fit(loadedTrainData);
}
/// <summary>
/// Handles a click on the "save" tile: ensures a "Save" directory exists next to the
/// executing assembly, then prompts the user for a target file and saves the game.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void TileSave_OnClick(object sender, RoutedEventArgs e)
{
    ISaver saver = new BinarySaver();
    string directorySaveName = "Save";
    // BUGFIX: build the path with Path.Combine instead of manual "\\" concatenation,
    // which is brittle and platform-specific.
    string fullSavePath = Path.Combine(
        Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location),
        directorySaveName);
    Console.WriteLine(fullSavePath);
    if (!Directory.Exists(fullSavePath))
    {
        Directory.CreateDirectory(fullSavePath);
    }
    SaveFileDialog saveFileDialog = new SaveFileDialog
    {
        Filter = saver.Filter(),
        InitialDirectory = fullSavePath
    };
    if (saveFileDialog.ShowDialog() == true)
    {
        saver.Save(_gameView.Game.Container, saveFileDialog.FileName);
    }
}
/// <summary>
/// Restores BlackBoard entries written by Save(): reads a flat array of
/// (board, field, value) triples and decodes "__BOOLEAN:"/"__INT32:" prefixed
/// strings back into bool/int before writing each entry to the BlackBoard.
/// </summary>
public static void Load()
{
    object[] array = BinarySaver.ReadBinFile();
    if (array == null)
    {
        return;
    }
    // BUGFIX: the condition was "i < array.Length - 3", which skipped the final
    // triple (for a 3-element array the loop never ran). Iterate while a complete
    // (board, field, value) triple remains.
    for (int i = 0; i + 2 < array.Length; i += 3)
    {
        string board = (string)array[i];
        string field = (string)array[i + 1];
        object value = array[i + 2];
        if (value is string s)
        {
            if (s.StartsWith("__BOOLEAN:"))
            {
                // NOTE(review): Contains("true") mirrors the original decoding;
                // an exact suffix comparison would be stricter.
                bool b = s.Contains("true");
                BlackBoard.WriteInstant(board, field, b);
                continue;
            }
            if (s.StartsWith("__INT32:"))
            {
                int v = int.Parse(s.Replace("__INT32:", ""));
                BlackBoard.WriteInstant(board, field, v);
                continue;
            }
        }
        BlackBoard.WriteInstant(board, field, value);
    }
}
/// <summary>
/// Synchronizes only the write-immediate game states between the runtime save and
/// the save on disk for <paramref name="slot"/>, leaving every other property of the
/// on-disk save untouched. No-op if the slot has no save file.
/// </summary>
public void SyncImmediateStates(int slot)
{
    // Load a copy of the current save parallel to runtime, add/remove immediate
    // states as necessary, and save. Don't add any other runtime properties.
    if (BinarySaver.HasFile(slot))
    {
        Save diskSave = BinarySaver.LoadFile(slot);

        // Prune write-immediate states that are no longer present in runtime.
        // NOTE(review): the `as GameState` cast will NRE if a state asset is
        // missing from Resources — confirm assets always exist.
        List<string> toPrune = new List<string>();
        foreach (string diskState in diskSave.gameStates)
        {
            if ((Resources.Load("ScriptableObjects/Game States/" + diskState) as GameState).writeImmediately)
            {
                if (!runtime.save.gameStates.Contains(diskState))
                {
                    toPrune.Add(diskState);
                }
            }
        }
        foreach (string s in toPrune)
        {
            diskSave.gameStates.Remove(s);
        }

        // Add new write-immediate states from runtime.
        // BUGFIX: previously every write-immediate runtime state was appended
        // unconditionally, duplicating states already on disk on every sync.
        foreach (string stateName in runtime.save.gameStates)
        {
            if ((Resources.Load("ScriptableObjects/Game States/" + stateName) as GameState).writeImmediately
                && !diskSave.gameStates.Contains(stateName))
            {
                diskSave.gameStates.Add(stateName);
            }
        }

        // Save the non-runtime save loaded from disk.
        BinarySaver.SaveFile(diskSave, slot);
    }
}
/// <summary>
/// Serializes this object's state: the schema of the data view containing the
/// optional columns (as an embedded "Schema.idv" stream), followed by the count,
/// name and type description of each added column.
/// </summary>
public void Save(IHostEnvironment env, ModelSaveContext ctx)
{
    Contracts.AssertValue(ctx);

    // *** Binary format ***
    // Schema of the data view containing the optional columns
    // int: number of added columns
    // for each added column
    //   int: id of output column name
    //   ColumnType: the type of the column

    var emptyView = new EmptyDataView(env, _inputWithOptionalColumn);
    var schemaSaver = new BinarySaver(env, new BinarySaver.Arguments { Silent = true });
    using (var stream = new MemoryStream())
    {
        schemaSaver.SaveData(stream, emptyView, _srcColsWithOptionalColumn);
        ctx.SaveBinaryStream("Schema.idv", w => w.WriteByteArray(stream.ToArray()));
    }

    int columnCount = InfoCount;
    ctx.Writer.Write(columnCount);
    var typeSaver = new BinarySaver(env, new BinarySaver.Arguments());
    for (int i = 0; i < columnCount; i++)
    {
        ctx.SaveNonEmptyString(GetColumnNameCore(i));
        typeSaver.TryWriteTypeDescription(ctx.Writer.BaseStream, ColumnTypes[i], out int written);
    }
}
// Scores data through a loaded model and writes the results to Args.OutputDataFile.
// Pipeline: load the model objects (predictor, train schema, loader); bind a schema
// mapper using feature/group columns matched against the loader schema; wrap the
// loader with the scorer transform and any post-transforms; optionally save the
// composed pipe to Args.OutputModelFile; then pick a saver (TextSaver for
// ".txt"/".tlc" output extensions, BinarySaver otherwise, unless Args.Saver is
// given) and save the selected columns.
//
// Column selection: with OutputAllColumns every visible (or KeepHidden) savable
// column is written; otherwise columns come from Args.OutputColumn and/or the
// max ScoreColumnSetId metadata via ShouldAddColumn. Unsavable column types are
// skipped with a warning; it is an error if no column survives.
//
// NOTE(review): `writer is BinaryWriter` checks System.IO.BinaryWriter unless a
// project alias exists — the intent looks like `writer is BinarySaver`; verify.
// NOTE: these two original lines are a collapsed multi-line method; the mid-line
// `// REVIEW:` comment below is an artifact of that collapsing.
private void RunCore(IChannel ch) { Host.AssertValue(ch); ch.Trace("Creating loader"); LoadModelObjects(ch, true, out var predictor, true, out var trainSchema, out var loader); ch.AssertValue(predictor); ch.AssertValueOrNull(trainSchema); ch.AssertValue(loader); ch.Trace("Creating pipeline"); var scorer = Args.Scorer; ch.Assert(scorer == null || scorer is ICommandLineComponentFactory, "ScoreCommand should only be used from the command line."); var bindable = ScoreUtils.GetSchemaBindableMapper(Host, predictor, scorerFactorySettings: scorer as ICommandLineComponentFactory); ch.AssertValue(bindable); // REVIEW: We probably ought to prefer role mappings from the training schema. string feat = TrainUtils.MatchNameOrDefaultOrNull(ch, loader.Schema, nameof(Args.FeatureColumn), Args.FeatureColumn, DefaultColumnNames.Features); string group = TrainUtils.MatchNameOrDefaultOrNull(ch, loader.Schema, nameof(Args.GroupColumn), Args.GroupColumn, DefaultColumnNames.GroupId); var customCols = TrainUtils.CheckAndGenerateCustomColumns(ch, Args.CustomColumn); var schema = new RoleMappedSchema(loader.Schema, label: null, feature: feat, group: group, custom: customCols, opt: true); var mapper = bindable.Bind(Host, schema); if (scorer == null) { scorer = ScoreUtils.GetScorerComponent(Host, mapper); } loader = CompositeDataLoader.ApplyTransform(Host, loader, "Scorer", scorer.ToString(), (env, view) => scorer.CreateComponent(env, view, mapper, trainSchema)); loader = CompositeDataLoader.Create(Host, loader, Args.PostTransform); if (!string.IsNullOrWhiteSpace(Args.OutputModelFile)) { ch.Trace("Saving the data pipe"); SaveLoader(loader, Args.OutputModelFile); } ch.Trace("Creating saver"); IDataSaver writer; if (Args.Saver == null) { var ext = Path.GetExtension(Args.OutputDataFile); var isText = ext == ".txt" || ext == ".tlc"; if (isText) { writer = new TextSaver(Host, new TextSaver.Arguments()); } else { writer = new BinarySaver(Host, new BinarySaver.Arguments()); } } else { writer = 
Args.Saver.CreateComponent(Host); } ch.Assert(writer != null); var outputIsBinary = writer is BinaryWriter; bool outputAllColumns = Args.OutputAllColumns == true || (Args.OutputAllColumns == null && Utils.Size(Args.OutputColumn) == 0 && outputIsBinary); bool outputNamesAndLabels = Args.OutputAllColumns == true || Utils.Size(Args.OutputColumn) == 0; if (Args.OutputAllColumns == true && Utils.Size(Args.OutputColumn) != 0) { ch.Warning(nameof(Args.OutputAllColumns) + "=+ always writes all columns irrespective of " + nameof(Args.OutputColumn) + " specified."); } if (!outputAllColumns && Utils.Size(Args.OutputColumn) != 0) { foreach (var outCol in Args.OutputColumn) { if (!loader.Schema.TryGetColumnIndex(outCol, out int dummyColIndex)) { throw ch.ExceptUserArg(nameof(Arguments.OutputColumn), "Column '{0}' not found.", outCol); } } } uint maxScoreId = 0; if (!outputAllColumns) { maxScoreId = loader.Schema.GetMaxMetadataKind(out int colMax, MetadataUtils.Kinds.ScoreColumnSetId); } ch.Assert(outputAllColumns || maxScoreId > 0); // score set IDs are one-based var cols = new List <int>(); for (int i = 0; i < loader.Schema.Count; i++) { if (!Args.KeepHidden && loader.Schema.IsHidden(i)) { continue; } if (!(outputAllColumns || ShouldAddColumn(loader.Schema, i, maxScoreId, outputNamesAndLabels))) { continue; } var type = loader.Schema.GetColumnType(i); if (writer.IsColumnSavable(type)) { cols.Add(i); } else { ch.Warning("The column '{0}' will not be written as it has unsavable column type.", loader.Schema.GetColumnName(i)); } } ch.Check(cols.Count > 0, "No valid columns to save"); ch.Trace("Scoring and saving data"); using (var file = Host.CreateOutputFile(Args.OutputDataFile)) using (var stream = file.CreateWriteStream()) writer.SaveData(stream, loader, cols.ToArray()); }
// Deserialization constructor: after the base loads column info, reads one
// (type, value) replacement pair per column from the model stream via
// BinarySaver.TryLoadTypeAndValue and stores it in _repValues.
//
// Validation per column: the decoded item type must equal the expected column's
// item type; when the decoded value is a vector, the expected column must itself be
// a vector of known size with a matching VectorSize (each violation throws
// ExceptParam with a specific message). Vector replacement values are converted to
// a dense values array via a reflection call to the generic GetValuesArray<T>
// instantiated at the decoded raw item type; scalars are stored as-is.
//
// NOTE: these two original lines are a collapsed multi-line constructor; the
// mid-line `// *** Binary format ***` and `// REVIEW:` comments are artifacts of
// that collapsing.
private NAReplaceTransform(IHost host, ModelLoadContext ctx, IDataView input) : base(host, ctx, input, TestType) { Host.AssertValue(ctx); Host.AssertNonEmpty(Infos); GetInfoAndMetadata(out _types, out _isNAs); // *** Binary format *** // <base> // for each column: // type and value _repValues = new object[Infos.Length]; _repIsDefault = new BitArray[Infos.Length]; var saver = new BinarySaver(Host, new BinarySaver.Arguments()); for (int iinfo = 0; iinfo < Infos.Length; iinfo++) { object repValue; ColumnType repType; if (!saver.TryLoadTypeAndValue(ctx.Reader.BaseStream, out repType, out repValue)) { throw Host.ExceptDecode(); } if (!_types[iinfo].ItemType.Equals(repType.ItemType)) { throw Host.ExceptParam(nameof(input), "Decoded serialization of type '{0}' does not match expected ColumnType of '{1}'", repType.ItemType, _types[iinfo].ItemType); } // If type is a vector and the value is not either a scalar or a vector of the same size, throw an error. if (repType.IsVector) { if (!_types[iinfo].IsVector) { throw Host.ExceptParam(nameof(input), "Decoded serialization of type '{0}' cannot be a vector when Columntype is a scalar of type '{1}'", repType, _types[iinfo]); } if (!_types[iinfo].IsKnownSizeVector) { throw Host.ExceptParam(nameof(input), "Decoded serialization for unknown size vector '{0}' must be a scalar instead of type '{1}'", _types[iinfo], repType); } if (_types[iinfo].VectorSize != repType.VectorSize) { throw Host.ExceptParam(nameof(input), "Decoded serialization of type '{0}' must be a scalar or a vector of the same size as Columntype '{1}'", repType, _types[iinfo]); } // REVIEW: The current implementation takes the serialized VBuffer, densifies it, and stores the values array. // It might be of value to consider storing the VBUffer in order to possibly benefit from sparsity. However, this would // necessitate a reimplementation of the FillValues code to accomodate sparse VBuffers. 
object[] args = new object[] { repValue, _types[iinfo], iinfo }; Func <VBuffer <int>, ColumnType, int, int[]> func = GetValuesArray <int>; var meth = func.GetMethodInfo().GetGenericMethodDefinition().MakeGenericMethod(repType.ItemType.RawType); _repValues[iinfo] = meth.Invoke(this, args); } else { _repValues[iinfo] = repValue; } Host.Assert(repValue.GetType() == _types[iinfo].RawType || repValue.GetType() == _types[iinfo].ItemType.RawType); } }
/// <summary>
/// Initializes the main window's components and populates Topics from the
/// persisted binary store.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    var savedTopics = BinarySaver.LoadFromBinnary();
    Topics = savedTopics;
}
/// <summary>Returns true when a save file exists for the configured save slot.</summary>
public static bool HasSavedGame() => BinarySaver.HasFile(saveSlot);
// Restores the serializable item list from the "Inventory" binary file.
private void BinaryLoad()
{
    var loadedItems = BinarySaver.Load<List<SerialItem>>("Inventory");
    SL.serializableList = loadedItems;
}
// Flushes the current runtime state into runtime.save, then persists it to the
// given save slot.
public void WriteToDiskSlot(int slot)
{
    SaveRuntime();
    var snapshot = runtime.save;
    BinarySaver.SaveFile(snapshot, slot);
}
// Replaces the runtime save with the one persisted in the given slot, applies it,
// and marks that at least one load has happened.
public void LoadFromSlot(int slot)
{
    var loadedSave = BinarySaver.LoadFile(slot);
    runtime.save = loadedSave;
    LoadRuntime();
    runtime.loadedOnce = true;
}
// Prepares the fraud-detection data set and returns
// (classification context, reader, trainData, testData).
// NOTE(review): the signature's return type is cut off before this chunk — the
// returned tuple suggests something like
// (BinaryClassificationContext, TextLoader, IDataView, IDataView); confirm upstream.
//
// Flow: build a TextLoader for the 30-column credit-card CSV (Label at index 30,
// V1..V28 + Amount as R4 features, header row present, comma-separated). If neither
// testData.idv nor trainData.idv exists in _outputPath, read the raw file, do an
// 80:20 train/test split and save both splits as binary .idv; otherwise reload both
// splits from the cached .idv files via BinaryLoader + RoleMappedData.
//
// NOTE(review): the cache check uses `&&` — when exactly ONE of the two .idv files
// exists, the else-branch still tries to load BOTH and will fail on the missing
// one; `||` (regenerate if either is missing) looks like the intent. Verify.
// NOTE: these three original lines are a collapsed multi-line method; the mid-line
// `//` comments are artifacts of that collapsing.
PrepareData(MLContext mlContext) { IDataView data = null; IDataView trainData = null; IDataView testData = null; // Step one: read the data as an IDataView. // Create the reader: define the data columns // and where to find them in the text file. var reader = new TextLoader(mlContext, new TextLoader.Arguments { Column = new[] { // A boolean column depicting the 'label'. new TextLoader.Column("Label", DataKind.BL, 30), // 29 Features V1..V28 + Amount new TextLoader.Column("V1", DataKind.R4, 1), new TextLoader.Column("V2", DataKind.R4, 2), new TextLoader.Column("V3", DataKind.R4, 3), new TextLoader.Column("V4", DataKind.R4, 4), new TextLoader.Column("V5", DataKind.R4, 5), new TextLoader.Column("V6", DataKind.R4, 6), new TextLoader.Column("V7", DataKind.R4, 7), new TextLoader.Column("V8", DataKind.R4, 8), new TextLoader.Column("V9", DataKind.R4, 9), new TextLoader.Column("V10", DataKind.R4, 10), new TextLoader.Column("V11", DataKind.R4, 11), new TextLoader.Column("V12", DataKind.R4, 12), new TextLoader.Column("V13", DataKind.R4, 13), new TextLoader.Column("V14", DataKind.R4, 14), new TextLoader.Column("V15", DataKind.R4, 15), new TextLoader.Column("V16", DataKind.R4, 16), new TextLoader.Column("V17", DataKind.R4, 17), new TextLoader.Column("V18", DataKind.R4, 18), new TextLoader.Column("V19", DataKind.R4, 19), new TextLoader.Column("V20", DataKind.R4, 20), new TextLoader.Column("V21", DataKind.R4, 21), new TextLoader.Column("V22", DataKind.R4, 22), new TextLoader.Column("V23", DataKind.R4, 23), new TextLoader.Column("V24", DataKind.R4, 24), new TextLoader.Column("V25", DataKind.R4, 25), new TextLoader.Column("V26", DataKind.R4, 26), new TextLoader.Column("V27", DataKind.R4, 27), new TextLoader.Column("V28", DataKind.R4, 28), new TextLoader.Column("Amount", DataKind.R4, 29), }, // First line of the file is a header, not a data row. 
HasHeader = true, Separator = "," }); // We know that this is a Binary Classification task, // so we create a Binary Classification context: // it will give us the algorithms we need, // as well as the evaluation procedure. var classification = new BinaryClassificationContext(mlContext); if (!File.Exists(Path.Combine(_outputPath, "testData.idv")) && !File.Exists(Path.Combine(_outputPath, "trainData.idv"))) { // Split the data 80:20 into train and test sets, train and evaluate. data = reader.Read(new MultiFileSource(_dataSetFile)); ConsoleHelpers.ConsoleWriteHeader("Show 4 transactions fraud (true) and 4 transactions not fraud (false) - (source)"); ConsoleHelpers.InspectData(mlContext, data, 4); // Can't do stratification when column type is a boolean, is this an issue? //(trainData, testData) = classification.TrainTestSplit(data, testFraction: 0.2, stratificationColumn: "Label"); (trainData, testData) = classification.TrainTestSplit(data, testFraction: 0.2); // save test split IHostEnvironment env = (IHostEnvironment)mlContext; using (var ch = env.Start("SaveData")) using (var file = env.CreateOutputFile(Path.Combine(_outputPath, "testData.idv"))) { var saver = new BinarySaver(mlContext, new BinarySaver.Arguments()); DataSaverUtils.SaveDataView(ch, saver, testData, file); } // save train split using (var ch = ((IHostEnvironment)env).Start("SaveData")) using (var file = env.CreateOutputFile(Path.Combine(_outputPath, "trainData.idv"))) { var saver = new BinarySaver(mlContext, new BinarySaver.Arguments()); DataSaverUtils.SaveDataView(ch, saver, trainData, file); } } else { // Load splited data var binTrainData = new BinaryLoader(mlContext, new BinaryLoader.Arguments(), new MultiFileSource(Path.Combine(_outputPath, "trainData.idv"))); var trainRoles = new RoleMappedData(binTrainData, roles: TransactionObservation.Roles()); trainData = trainRoles.Data; var binTestData = new BinaryLoader(mlContext, new BinaryLoader.Arguments(), new 
MultiFileSource(Path.Combine(_outputPath, "testData.idv"))); var testRoles = new RoleMappedData(binTestData, roles: TransactionObservation.Roles()); testData = testRoles.Data; } ConsoleHelpers.ConsoleWriteHeader("Show 4 transactions fraud (true) and 4 transactions not fraud (false) - (traindata)"); ConsoleHelpers.InspectData(mlContext, trainData, 4); ConsoleHelpers.ConsoleWriteHeader("Show 4 transactions fraud (true) and 4 transactions not fraud (false) - (testData)"); ConsoleHelpers.InspectData(mlContext, testData, 4); return(classification, reader, trainData, testData); }
// Persists the serializable item list under the "Inventory" binary file.
private void BinarySave()
{
    var itemsToPersist = SL.serializableList;
    BinarySaver.Save(itemsToPersist, "Inventory");
}