/// <summary>
/// Serializes the current graph definition and writes the bytes to <paramref name="stream"/>.
/// </summary>
/// <param name="stream">Destination stream; must be writable.</param>
/// <returns>A task that completes when the graph bytes have been written.</returns>
public async ValueTask SaveAsync(Stream stream)
{
    // TFBuffer wraps native memory; the original leaked it. Dispose once the
    // managed copy has been taken via ToArray().
    using (var buffer = new TFBuffer())
    {
        TFGraph.ToGraphDef(buffer);
        var bytes = buffer.ToArray();
        await stream.WriteAsync(bytes);
    }
}
/// <summary>
/// Load param and model from a model directory.
/// </summary>
/// <param name="model_dir">Directory containing the params.json file and the model (graphdef) file.</param>
/// <returns>true if successful, false if failed.</returns>
public virtual bool Load(string model_dir)
{
    try
    {
        // params.json names the model; the graphdef file is "<model_name>.graphdef".
        string param_file = Path.Combine(model_dir, "params.json");
        string params_json = File.ReadAllText(param_file);
        _params = JsonConvert.DeserializeObject<Dictionary<string, object>>(params_json);
        _prefix = (string)(_params["model_name"]);
        string graphdef_filepath = Path.Combine(model_dir, _prefix + ".graphdef");

        _graph = new TFGraph();
        // Import options and the graphdef buffer wrap native memory; the original
        // leaked both — release them as soon as the import completes.
        using (var opts = new TFImportGraphDefOptions())
        using (var buff = new TFBuffer(File.ReadAllBytes(graphdef_filepath)))
        {
            opts.SetPrefix(_prefix);
            _graph.Import(buff, opts);
        }
        _session = new TFSession(_graph);
        return true;
    }
    catch (Exception)
    {
        // Best-effort load: callers check the boolean result instead of handling
        // exceptions. NOTE(review): consider logging the exception so load
        // failures are diagnosable.
        return false;
    }
}
/// <summary>
/// Creates a session and graph from a saved session model.
/// </summary>
/// <returns>On success, populates the provided <paramref name="graph"/> with the contents
/// of the graph stored in the specified model; returns null when loading fails and a
/// <paramref name="status"/> buffer was supplied.</returns>
/// <param name="sessionOptions">Session options to use for the new session.</param>
/// <param name="runOptions">Options to use to initialize the state (can be null).</param>
/// <param name="exportDir">Must be set to the path of the exported SavedModel.</param>
/// <param name="tags">Must include the set of tags used to identify one MetaGraphDef in the SavedModel.</param>
/// <param name="graph">This must be a newly created graph.</param>
/// <param name="device">Device on which to load the session.</param>
/// <param name="status">Status buffer; if omitted, a <see cref="T:TensorFlow.TFException"/> exception is raised on error.</param>
/// <remarks>
/// Creates a new session using the specified <paramref name="sessionOptions"/> and then
/// initializes the state (restoring tensors and other assets) using <paramref name="runOptions"/>.
/// </remarks>
public static TFSession FromSavedModel(TFSessionOptions sessionOptions, TFBuffer runOptions, string exportDir, string[] tags, TFGraph graph, string device, TFStatus status = null)
{
    // Validate the required reference arguments up front.
    if (graph == null)
        throw new ArgumentNullException(nameof(graph));
    if (tags == null)
        throw new ArgumentNullException(nameof(tags));
    if (exportDir == null)
        throw new ArgumentNullException(nameof(exportDir));

    var cstatus = TFStatus.Setup(status);
    TFSession loaded = null;
    unsafe
    {
        var handle = TF_LoadSessionFromSavedModelOnDevice(
            sessionOptions.handle,
            runOptions == null ? null : runOptions.LLBuffer,
            exportDir, tags, tags.Length, graph.handle, device, cstatus.handle);
        if (cstatus.CheckMaybeRaise(status))
            loaded = new TFSession(handle, graph);
    }
    return loaded;
}
/// <summary>
/// Serializes the session's graph to a GraphDef byte array.
/// </summary>
/// <returns>The serialized GraphDef bytes.</returns>
public byte[] PersistModelDefinition()
{
    // TFBuffer wraps native memory; the original leaked it. Dispose after the
    // contents are copied to the managed array.
    using (var buffer = new TFBuffer())
    {
        this.session.Graph.ToGraphDef(buffer);
        return buffer.ToArray();
    }
}
/// <summary>
/// Smoke test: round-trips a UTF-8 string through a native TFBuffer and prints it back.
/// </summary>
static void Main(string[] args)
{
    var hello = Encoding.UTF8.GetBytes("Hello, world!");
    // TFBuffer wraps native memory; the original leaked it.
    using (var buffer = new TFBuffer(hello))
    {
        var bytes = buffer.ToArray();
        Console.WriteLine(Encoding.UTF8.GetString(bytes));
    }
}
/// <summary>
/// Loads the vocabulary and the frozen graph, then creates the session used for inference.
/// </summary>
public override void Initialize()
{
    // Vocabulary maps tokens to integer ids; file is expected in the working directory.
    string vocab_json = File.ReadAllText("vocab.json");
    _vocab = JsonConvert.DeserializeObject<Dictionary<string, int>>(vocab_json);

    _graph = new TFGraph();
    // Import options and the graphdef buffer wrap native memory; the original
    // leaked both — release them as soon as the import completes.
    using (var opts = new TFImportGraphDefOptions())
    using (var buff = new TFBuffer(File.ReadAllBytes("commaV5.graphdef")))
    {
        opts.SetPrefix(_prefix);
        _graph.Import(buff, opts);
    }
    _session = new TFSession(_graph);
}
/// <summary>
/// Exports a two-node graph to a GraphDef, re-imports it under a prefix into a fresh
/// graph, and verifies the prefixed nodes exist and the graph remains extensible.
/// </summary>
public void TestImportGraphDef()
{
    var status = new TFStatus();
    TFBuffer graphDef;

    // Create graph with two nodes, "x" and "3"
    using (var graph = new TFGraph())
    {
        Assert(status);
        Placeholder(graph, status);
        Assert(graph["feed"] != null);
        ScalarConst(3, graph, status);
        Assert(graph["scalar"] != null);

        // Export to GraphDef
        graphDef = new TFBuffer();
        graph.ToGraphDef(graphDef, status);
        Assert(status);
    }

    // Import it again, with a prefix, in a fresh graph
    using (var graph = new TFGraph())
    {
        // using (graphDef) guarantees the exported buffer is released even if
        // Import throws — the original leaked it on failure.
        using (graphDef)
        using (var options = new TFImportGraphDefOptions())
        {
            options.SetPrefix("imported");
            graph.Import(graphDef, options, status);
            Assert(status);
        }

        var scalar = graph["imported/scalar"];
        var feed = graph["imported/feed"];
        Assert(scalar != null);
        Assert(feed != null);

        // Can add nodes to the imported graph without trouble
        Add(feed, scalar, graph, status);
        Assert(status);
    }
}
/// <summary>
/// Saves the TensorFlow graph and the input/output column names to the model context.
/// </summary>
/// <param name="ctx">Model save context; must be positioned at the model.</param>
public void Save(ModelSaveContext ctx)
{
    _host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // Serialize the graph and dispose the native buffer before writing, so the
    // lambda captures only the managed byte array (the original leaked the buffer).
    byte[] modelBytes;
    using (var buffer = new TFBuffer())
    {
        _session.Graph.ToGraphDef(buffer);
        modelBytes = buffer.ToArray();
    }
    ctx.SaveBinaryStream("TFModel", w => { w.WriteByteArray(modelBytes); });

    Contracts.AssertNonEmpty(_inputColNames);
    ctx.Writer.Write(_inputColNames.Length);
    foreach (var colName in _inputColNames)
    {
        ctx.SaveNonEmptyString(colName);
    }
    ctx.SaveNonEmptyString(_outputColName);
}
/// <summary>
/// Saves the TensorFlow model stream and the input/output column names to the model context.
/// </summary>
/// <param name="ctx">Model save context; must be positioned at the model.</param>
public void Save(ModelSaveContext ctx)
{
    _host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // *** Binary format ***
    // stream: tensorFlow model.
    // int: number of input columns
    // for each input column
    //   int: id of int column name
    // int: number of output columns
    // for each output column
    //   int: id of output column name

    // Serialize the graph and dispose the native buffer before writing, so the
    // lambda captures only the managed byte array (the original leaked the buffer).
    byte[] modelBytes;
    using (var buffer = new TFBuffer())
    {
        Session.Graph.ToGraphDef(buffer);
        modelBytes = buffer.ToArray();
    }
    ctx.SaveBinaryStream("TFModel", w => { w.WriteByteArray(modelBytes); });

    _host.AssertNonEmpty(Inputs);
    ctx.Writer.Write(Inputs.Length);
    foreach (var colName in Inputs)
    {
        ctx.SaveNonEmptyString(colName);
    }
    _host.AssertNonEmpty(Outputs);
    ctx.Writer.Write(Outputs.Length);
    foreach (var colName in Outputs)
    {
        ctx.SaveNonEmptyString(colName);
    }
}
/// <summary>
/// Round-trips a string through a TFBuffer, then decodes a serialized GraphDef
/// protobuf from <c>inFile</c> while creating <c>outFile</c> for output.
/// </summary>
static void Main(string[] args)
{
    // TFBuffer encode/decode check; the buffer wraps native memory, so dispose it
    // (the original leaked it).
    var hello = Encoding.UTF8.GetBytes("Hello, world!");
    using (var buffer = new TFBuffer(hello))
    {
        var bytes = buffer.ToArray();
        Console.WriteLine(Encoding.UTF8.GetString(bytes));
    }

    // Proto decode: read the serialized GraphDef and deserialize it.
    // (Earlier experiments — raw text dumps, byte-wise Unicode dumps, manual
    // graph export — were dead commented-out code and have been removed.)
    using (var fs = new FileStream(outFile, FileMode.Create))
    using (var sw = new StreamWriter(fs))
    {
        byte[] data = File.ReadAllBytes(inFile);
        using (var ms1 = new MemoryStream(data))
        {
            GraphDef Mygraph = Deserialize<GraphDef>(ms1);
        }
        // Flush explicitly before the writer/stream are disposed.
        sw.Flush();
    }
}
/// <summary>
/// Loads a SavedModel, warms up the session with a zero image, then classifies every
/// image under tzb/images/defect, printing the best label and per-image latency.
/// </summary>
public static void Main(string[] args)
{
    // Configure the session placement via a serialized ConfigProto.
    TFSessionOptions options = new TFSessionOptions();
    unsafe
    {
        //byte[] PUConfig = new byte[] { 0x32, 0x05, 0x20, 0x01, 0x2a, 0x01, 0x30, 0x38, 0x01 }; //gpu
        byte[] PUConfig = new byte[] { 0x0a, 0x07, 0x0a, 0x03, 0x67, 0x70, 0x75, 0x10, 0x00 }; //cpu
        fixed(void *ptr = &PUConfig[0])
        {
            options.SetConfig(new IntPtr(ptr), PUConfig.Length);
        }
    }

    TFSession session;
    var graph = new TFGraph();
    using (TFSession sess = new TFSession(graph, options))
    using (var metaGraphUnused = new TFBuffer())
    {
        session = sess.FromSavedModel(options, null, "tzb", new[] { "serve" }, graph, metaGraphUnused);
        IEnumerable<TensorFlow.DeviceAttributes> iem = session.ListDevices();
        foreach (object obj in iem)
        {
            Console.WriteLine(((DeviceAttributes)obj).Name);
        }

        var labels = File.ReadAllLines("tzb/label.txt");

        // Warm up the session with an all-zero image so later timings are stable.
        // (C# arrays are zero-initialized, so no explicit fill loop is needed.)
        float[] eimg = new float[224 * 224];
        using (TFTensor ten = TFTensor.FromBuffer(tfs, eimg, 0, 224 * 224 * 1))
        {
            for (int j = 0; j < 3; j++)
            {
                var runner = session.GetRunner();
                runner.AddInput(graph["images"][0], ten).Fetch(graph["classes"].Name);
                var output = runner.Run();
            }
        }

        string[] files = Directory.GetFiles("tzb/images/defect", "*.*");
        foreach (string file in files)
        {
            DateTime bft = DateTime.Now;
            // Dispose each image tensor per iteration — the original leaked one
            // native tensor per file.
            using (var tensor = ImageUtil.CreateTensorFromImageFile(file))
            {
                var runner = session.GetRunner();
                runner.AddInput(graph["images"][0], tensor).Fetch(graph["classes"].Name);
                var output = runner.Run();
                DateTime aft = DateTime.Now;
                TimeSpan ts = aft.Subtract(bft);
                System.Threading.Thread.Sleep(50);
                var result = output[0];
                int class_ = ((int[])result.GetValue(jagged: true))[0];
                Console.WriteLine(file + " best_match: " +
                    class_ + " " + labels[class_] + " time: " + ts.TotalMilliseconds);
            }
        }
    }
}
/// <summary>
/// Saves either the frozen graph bytes or the full SavedModel directory contents,
/// followed by the input/output column names, to the model context.
/// </summary>
/// <param name="ctx">Model save context; must be positioned at the model.</param>
public void Save(ModelSaveContext ctx)
{
    _host.AssertValue(ctx);
    ctx.CheckAtModel();
    ctx.SetVersionInfo(GetVersionInfo());

    // *** Binary format ***
    // byte: indicator for frozen models
    // stream: tensorFlow model.
    // int: number of input columns
    // for each input column
    //   int: id of int column name
    // int: number of output columns
    // for each output column
    //   int: id of output column name

    var isFrozen = string.IsNullOrEmpty(_savedModelPath);
    ctx.Writer.WriteBoolByte(isFrozen);
    if (isFrozen)
    {
        // Frozen model: serialize the in-memory graph. Dispose the native buffer
        // before writing so the lambda captures only the managed byte array
        // (the original leaked the buffer).
        byte[] modelBytes;
        using (var buffer = new TFBuffer())
        {
            Session.Graph.ToGraphDef(buffer);
            modelBytes = buffer.ToArray();
        }
        ctx.SaveBinaryStream("TFModel", w => { w.WriteByteArray(modelBytes); });
    }
    else
    {
        // SavedModel: copy every file under the directory, preserving relative paths.
        ctx.SaveBinaryStream("TFSavedModel", w =>
        {
            string[] modelFilePaths = Directory.GetFiles(_savedModelPath, "*", SearchOption.AllDirectories);
            w.Write(modelFilePaths.Length);
            foreach (var fullPath in modelFilePaths)
            {
                var relativePath = fullPath.Substring(_savedModelPath.Length + 1);
                w.Write(relativePath);
                // FileAccess.Read: saving must not require write permission on model files.
                using (var fs = new FileStream(fullPath, FileMode.Open, FileAccess.Read))
                {
                    long fileLength = fs.Length;
                    w.Write(fileLength);
                    long actualWritten = fs.CopyRange(w.BaseStream, fileLength);
                    _host.Assert(actualWritten == fileLength);
                }
            }
        });
    }

    _host.AssertNonEmpty(Inputs);
    ctx.Writer.Write(Inputs.Length);
    foreach (var colName in Inputs)
    {
        ctx.SaveNonEmptyString(colName);
    }
    _host.AssertNonEmpty(Outputs);
    ctx.Writer.Write(Outputs.Length);
    foreach (var colName in Outputs)
    {
        ctx.SaveNonEmptyString(colName);
    }
}
// Copies the model asset's bytes into a native TFBuffer kept in _modelBuffer.
// NOTE(review): the buffer wraps native memory — ensure it is disposed somewhere
// (e.g. a teardown hook); disposal is not visible in this block.
private void Start() { _modelBuffer = new TFBuffer(Model.bytes); }