/// <summary>
/// Download (if needed) and initialize the retrained Inception flower model,
/// then classify the input image and print the top result with timing.
/// </summary>
private static async Task Run()
{
    // Session options: allow GPU memory to grow on demand when the native
    // library was built with CUDA support.
    SessionOptions sessionOptions = new SessionOptions();
    if (TfInvoke.IsGoogleCudaEnabled)
    {
        Tensorflow.ConfigProto cfg = new Tensorflow.ConfigProto
        {
            GpuOptions = new Tensorflow.GPUOptions { AllowGrowth = true }
        };
        sessionOptions.SetConfig(cfg.ToProtobuf());
    }

    _inceptionGraph = new Emgu.TF.Models.Inception(null, sessionOptions);
    _inceptionGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
    //_inceptionGraph.OnDownloadCompleted += onDownloadCompleted;

    // Use a retrained model to recognize flowers.
    await _inceptionGraph.Init(
        new string[] { "optimized_graph.pb", "output_labels.txt" },
        "https://github.com/emgucv/models/raw/master/inception_flower_retrain/",
        "Placeholder",
        "final_result");

    // Time only the tensor conversion + inference, not the model download.
    Stopwatch watch = Stopwatch.StartNew();
    Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile<float>(
        _inputFileInfo.FullName, 299, 299, 0.0f, 1.0f / 255.0f, false, false);
    var results = _inceptionGraph.Recognize(imageTensor);
    watch.Stop();

    String resStr = String.Format(
        "Object is {0} with {1}% probability. Recognition completed in {2} milliseconds.",
        results[0].Label,
        results[0].Probability * 100,
        watch.ElapsedMilliseconds);
    System.Console.WriteLine(resStr);
}
/// <summary>
/// Set up the form, load the native TensorFlow runtime, and create the
/// Mask R-CNN model with a CUDA-aware session configuration.
/// </summary>
public MainForm()
{
    InitializeComponent();
    TfInvoke.Init();

    messageLabel.Text = String.Empty;
    cameraButton.Text = _startCameraText;
    //DisableUI();

    // Enable on-demand GPU memory growth when CUDA is present.
    SessionOptions sessionOptions = new SessionOptions();
    if (TfInvoke.IsGoogleCudaEnabled)
    {
        Tensorflow.ConfigProto cfg = new Tensorflow.ConfigProto
        {
            GpuOptions = new Tensorflow.GPUOptions { AllowGrowth = true }
        };
        sessionOptions.SetConfig(cfg.ToProtobuf());
    }

    _inceptionGraph = new MaskRcnnInceptionV2Coco(null, sessionOptions);
    _inceptionGraph.OnDownloadProgressChanged += OnDownloadProgressChangedEventHandler;
    //_inceptionGraph.Init();
}
/// <summary>
/// Allocate a new native session-options handle, point it at the given
/// target, and apply the configuration when one is supplied.
/// </summary>
/// <param name="target">The session target; empty selects the default.</param>
/// <param name="config">Optional session configuration; null leaves defaults.</param>
public SessionOptions(string target = "", ConfigProto config = null)
{
    Handle = c_api.TF_NewSessionOptions();
    c_api.TF_SetTarget(Handle, target);
    if (config == null)
        return;
    SetConfig(config);
}
/// <summary>
/// Create the multibox people-detection page: build the model (once, with a
/// CUDA-aware session config) and wire the image-loaded handler that runs
/// detection, annotates the image, and reports timing.
/// </summary>
public MultiboxDetectionPage()
    : base()
{
    Title = "Multibox People Detection";

    if (_multiboxGraph == null)
    {
        // Enable on-demand GPU memory growth when CUDA is present.
        SessionOptions so = new SessionOptions();
        if (TfInvoke.IsGoogleCudaEnabled)
        {
            Tensorflow.ConfigProto config = new Tensorflow.ConfigProto();
            config.GpuOptions = new Tensorflow.GPUOptions();
            config.GpuOptions.AllowGrowth = true;
            so.SetConfig(config.ToProtobuf());
        }

        _multiboxGraph = new MultiboxGraph(null, so);
        _multiboxGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
        _multiboxGraph.OnDownloadCompleted += onDownloadCompleted;
        _multiboxGraph.OnDownloadCompleted += (sender, e) => { OnButtonClicked(sender, e); };
    }

    OnImagesLoaded += (sender, image) =>
    {
        try
        {
            SetMessage("Please wait...");
            SetImage();

            // Time tensor conversion + detection; this is what the message reports.
            Stopwatch watch = Stopwatch.StartNew();
            Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile<float>(image[0], 224, 224, 128.0f, 1.0f / 128.0f);
            MultiboxGraph.Result[] detectResult = _multiboxGraph.Detect(imageTensor);
            watch.Stop();

            Emgu.Models.Annotation[] annotations = MultiboxGraph.FilterResults(detectResult, 0.1f);
            var jpeg = Emgu.Models.NativeImageIO.ImageFileToJpeg(image[0], annotations);
            // FIX: removed a second, redundant watch.Stop() that followed the jpeg
            // conversion — the stopwatch was already stopped, so the call was a no-op
            // and misleadingly suggested the jpeg time was being measured.

            SetImage(jpeg.Raw, jpeg.Width, jpeg.Height);
#if __MACOS__
            var displayImage = this.DisplayImage;
            displayImage.WidthRequest = jpeg.Width;
            displayImage.HeightRequest = jpeg.Height;
#endif
            SetMessage(String.Format("Detected in {0} milliseconds.", watch.ElapsedMilliseconds));
        }
        catch (Exception excpt)
        {
            String msg = excpt.Message.Replace(System.Environment.NewLine, " ");
            SetMessage(msg);
        }
    };
}
/// <summary>
/// Serialize <paramref name="config"/> to protobuf bytes and apply them to
/// this session-options handle via TF_SetConfig.
/// </summary>
/// <param name="config">The session configuration to apply.</param>
/// <returns>The status object reflecting the native call's outcome.</returns>
public Status SetConfig(ConfigProto config)
{
    var bytes = config.ToByteArray();
    var proto = Marshal.AllocHGlobal(bytes.Length);
    try
    {
        Marshal.Copy(bytes, 0, proto, bytes.Length);
        c_api.TF_SetConfig(_handle, proto, (ulong)bytes.Length, _status);
        _status.Check(false);
    }
    finally
    {
        // FIX: the original never freed this unmanaged buffer, leaking it on
        // every call. The sibling overloads free it after TF_SetConfig returns,
        // so releasing it here is safe.
        Marshal.FreeHGlobal(proto);
    }
    return _status;
}
/// <summary>
/// Serialize this configuration into its protobuf wire-format bytes.
/// </summary>
/// <param name="cp">The configuration to serialize.</param>
/// <returns>The protobuf-encoded bytes.</returns>
public static byte[] ToProtobuf(this ConfigProto cp)
{
    using (MemoryStream buffer = new MemoryStream())
    {
        using (pb::CodedOutputStream output = new pb::CodedOutputStream(buffer))
        {
            cp.WriteTo(output);
            // Flush before reading the stream back, otherwise buffered bytes
            // would be missing from the result.
            output.Flush();
            return buffer.ToArray();
        }
    }
}
/// <summary>
/// Apply the given configuration to this session-options handle. The
/// serialized proto is pinned in managed memory for the duration of the
/// native call, so no unmanaged allocation is needed.
/// </summary>
/// <param name="config">The session configuration to apply.</param>
private unsafe void SetConfig(ConfigProto config)
{
    byte[] serialized = config.ToByteArray();
    fixed (byte* protoPtr = serialized)
    {
        using (var status = new Status())
        {
            c_api.TF_SetConfig(Handle, (IntPtr)protoPtr, (ulong)serialized.Length, status.Handle);
            status.Check(false);
        }
    }
}
/// <summary>
/// Create the stylize page: build the stylize model (once, with a CUDA-aware
/// session config) and wire the image-loaded handler that stylizes the image
/// and reports timing.
/// </summary>
public StylizePage()
    : base()
{
    Title = "Stylize";

    if (_stylizeGraph == null)
    {
        // Enable on-demand GPU memory growth when CUDA is present.
        SessionOptions sessionOptions = new SessionOptions();
        if (TfInvoke.IsGoogleCudaEnabled)
        {
            Tensorflow.ConfigProto gpuConfig = new Tensorflow.ConfigProto
            {
                GpuOptions = new Tensorflow.GPUOptions { AllowGrowth = true }
            };
            sessionOptions.SetConfig(gpuConfig.ToProtobuf());
        }

        _stylizeGraph = new StylizeGraph(null, sessionOptions);
        _stylizeGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
        _stylizeGraph.OnDownloadCompleted += onDownloadCompleted;
        _stylizeGraph.OnDownloadCompleted += (sender, e) => { OnButtonClicked(sender, e); };
    }

    OnImagesLoaded += (sender, image) =>
    {
        try
        {
            SetMessage("Please wait...");
            SetImage();

            Stopwatch timer = Stopwatch.StartNew();
            byte[] jpeg = _stylizeGraph.StylizeToJpeg(image[0], 1);
            timer.Stop();

            SetImage(jpeg);
#if __MACOS__
            NSImage img = new NSImage(image[0]);
            var displayImage = this.GetImage();
            displayImage.WidthRequest = img.Size.Width;
            displayImage.HeightRequest = img.Size.Height;
#endif
            SetMessage(String.Format("Stylized in {0} milliseconds.", timer.ElapsedMilliseconds));
        }
        catch (Exception excpt)
        {
            String msg = excpt.Message.Replace(System.Environment.NewLine, " ");
            SetMessage(msg);
        }
    };
}
/// <summary>
/// Initialize the retrained Inception flower model, print the devices the
/// default session uses, then classify the input image and print the top
/// result with timing.
/// </summary>
private static async Task Run()
{
    SessionOptions sessionOptions = new SessionOptions();
    Tensorflow.ConfigProto config = new Tensorflow.ConfigProto();
#if DEBUG
    // In debug builds, log the device each op is placed on.
    config.LogDevicePlacement = true;
#endif
    if (TfInvoke.IsGoogleCudaEnabled)
    {
        config.GpuOptions = new Tensorflow.GPUOptions { AllowGrowth = true };
    }
    sessionOptions.SetConfig(config.ToProtobuf());

    _inceptionGraph = new Emgu.TF.Models.Inception(null, sessionOptions);
    _inceptionGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
    //_inceptionGraph.OnDownloadCompleted += onDownloadCompleted;

    System.Console.WriteLine("Initializing model");
    // Use a retrained model to recognize flowers.
    await _inceptionGraph.Init(
        new string[] { "optimized_graph.pb", "output_labels.txt" },
        "https://github.com/emgucv/models/raw/master/inception_flower_retrain/",
        "Placeholder",
        "final_result");
    System.Console.WriteLine("Model initialized.");

    // Report the devices available to the model's session.
    Session.Device[] devices = GetSessionDevices(_inceptionGraph.Session);
    StringBuilder deviceReport = new StringBuilder();
    foreach (Session.Device d in devices)
    {
        deviceReport.Append(String.Format("{1}: {0}{2}", d.Name, d.Type, Environment.NewLine));
    }
    System.Console.WriteLine(String.Format("Default Session Devices:{0}{1}", Environment.NewLine, deviceReport.ToString()));

    Stopwatch watch = Stopwatch.StartNew();
    System.Console.WriteLine("Reading image into tensor");
    Tensor imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile<float>(
        _inputFileInfo.FullName, 299, 299, 0.0f, 1.0f / 255.0f, false, false);
    System.Console.WriteLine("Running inference...");
    var results = _inceptionGraph.Recognize(imageTensor);
    watch.Stop();

    String resStr = String.Format(
        "Object is {0} with {1}% probability. Recognition completed in {2} milliseconds.",
        results[0][0].Label,
        results[0][0].Probability * 100,
        watch.ElapsedMilliseconds);
    System.Console.WriteLine(resStr);
}
/// <summary>
/// Serialize <paramref name="config"/> to protobuf bytes and apply them to
/// this session-options handle via TF_SetConfig, checking the resulting status.
/// </summary>
/// <param name="config">The session configuration to apply.</param>
public void SetConfig(ConfigProto config)
{
    var bytes = config.ToByteArray();
    var proto = Marshal.AllocHGlobal(bytes.Length);
    try
    {
        Marshal.Copy(bytes, 0, proto, bytes.Length);
        using (var status = new Status())
        {
            c_api.TF_SetConfig(_handle, proto, (ulong)bytes.Length, status);
            status.Check(false);
        }
    }
    finally
    {
        // FIX: release the unmanaged buffer in a finally block so an exception
        // from the native call or the status check no longer leaks it.
        Marshal.FreeHGlobal(proto);
    }
}
/// <summary>
/// Serialize <paramref name="config"/> to protobuf bytes and apply them to
/// this session-options handle via TF_SetConfig, checking the resulting status.
/// </summary>
/// <param name="config">The session configuration to apply.</param>
public void SetConfig(ConfigProto config)
{
    var bytes = config.ToByteArray(); //TODO! we can use WriteTo
    var proto = Marshal.AllocHGlobal(bytes.Length);
    try
    {
        Marshal.Copy(bytes, 0, proto, bytes.Length);
        using (var status = new Status())
        {
            c_api.TF_SetConfig(_handle, proto, (ulong)bytes.Length, status);
            status.Check(false);
        }
    }
    finally
    {
        // FIX: resolves the "potential memory leak" TODO — the buffer is now
        // freed even when the native call or the status check throws.
        Marshal.FreeHGlobal(proto);
    }
}
/// <summary>
/// Create a native session bound to <paramref name="g"/> (or the current
/// default graph) using the given target, configuration, and status.
/// </summary>
/// <param name="target">The session target; empty selects the default.</param>
/// <param name="g">The graph to attach; null uses the default graph.</param>
/// <param name="config">Optional session configuration.</param>
/// <param name="status">Optional status; null uses the shared tf.Status.</param>
public BaseSession(string target = "", Graph g = null, ConfigProto config = null, Status status = null)
{
    _graph = g ?? ops.get_default_graph();

    // Outside function-building mode, ensure our graph is the ambient default.
    if (!_graph.building_function && ops.get_default_graph() != _graph)
    {
        _graph.as_default();
    }

    using var opts = new SessionOptions(target, config);
    status ??= tf.Status;
    _handle = c_api.TF_NewSession(_graph, opts.Handle, status.Handle);
    status.Check(true);
}
/// <summary>
/// Create a native session bound to <paramref name="g"/> (or the current
/// default graph), installing the graph as default and recording the target.
/// </summary>
/// <param name="target">The session target; empty selects the default.</param>
/// <param name="g">The graph to attach; null uses the default graph.</param>
/// <param name="config">Optional session configuration.</param>
/// <param name="status">Optional status; null creates a fresh one.</param>
public BaseSession(string target = "", Graph g = null, ConfigProto config = null, Status status = null)
{
    _graph = g ?? ops.get_default_graph();
    _graph.as_default();
    _target = Encoding.UTF8.GetBytes(target);

    using (var sessionOptions = new SessionOptions(target, config))
    {
        // Native session creation is guarded by the process-wide lock.
        lock (Locks.ProcessWide)
        {
            status = status ?? new Status();
            _handle = c_api.TF_NewSession(_graph, sessionOptions.Handle, status.Handle);
            status.Check(true);
        }
    }
}
/// <summary>
/// Smoke test: query the CUDA flag, serialize a config with device-placement
/// logging, apply it to session options, and run a trivial add.
/// </summary>
public void TestCUDAEnabled()
{
    // Exercise the native CUDA query; the value itself is not asserted here.
    bool cuda = TfInvoke.IsGoogleCudaEnabled;

    Tensorflow.ConfigProto config = new Tensorflow.ConfigProto();
    config.LogDevicePlacement = true;

    // Serialize the config to protobuf wire format by hand.
    byte[] serializedConfig;
    using (MemoryStream stream = new MemoryStream())
    {
        config.WriteTo(stream);
        serializedConfig = stream.ToArray();
    }

    SessionOptions options = new SessionOptions();
    options.SetConfig(serializedConfig);
    Add(3, 4, options);
}
// Initialize the retrained Inception flower model using a CUDA-aware session
// configuration; completion/progress is surfaced via the download event handlers.
private static void Run()
{
    // Enable on-demand GPU memory growth when the native library was built
    // with CUDA support.
    SessionOptions so = new SessionOptions();
    if (TfInvoke.IsGoogleCudaEnabled)
    {
        Tensorflow.ConfigProto config = new Tensorflow.ConfigProto();
        config.GpuOptions = new Tensorflow.GPUOptions();
        config.GpuOptions.AllowGrowth = true;
        so.SetConfig(config.ToProtobuf());
    }

    _inceptionGraph = new Emgu.TF.Models.Inception(null, so);
    _inceptionGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
    _inceptionGraph.OnDownloadCompleted += onDownloadCompleted;

    //use a retrained model to recognize followers
    // NOTE(review): other variants of this sample `await` Init; here its return
    // value is discarded. Confirm this overload blocks until the model is ready,
    // or that all follow-up work is driven from OnDownloadCompleted.
    _inceptionGraph.Init(
        new string[] { "optimized_graph.pb", "output_labels.txt" },
        "https://github.com/emgucv/models/raw/master/inception_flower_retrain/",
        "Placeholder",
        "final_result");
}
/// <summary>
/// Smoke test: restrict the session to GPU "0" via GPUOptions (only applied
/// on CUDA-enabled builds) and verify a trivial add runs on that configuration.
/// </summary>
public void TestChooseDevice()
{
    SessionOptions so = new SessionOptions();
    Tensorflow.ConfigProto config = new Tensorflow.ConfigProto();
    //config.DeviceCount.Add("GPU", 1);
    //config.DeviceCount.Add("CPU", 1);
    config.GpuOptions = new GPUOptions();
    // Make only the first GPU visible to the session.
    config.GpuOptions.VisibleDeviceList = "0";
    //config.GpuOptions.VisibleDeviceList = "0, 1";
    //var devicesList = config.GpuOptions.VisibleDeviceList;
    //config.LogDevicePlacement = true;

    // The GPU restriction is only meaningful on a CUDA-enabled build.
    if (TfInvoke.IsGoogleCudaEnabled)
    {
        so.SetConfig(config.ToProtobuf());
    }

    // FIX: the result was previously stored in an unused local (`sum`);
    // the call is kept for its side effect — the graph must run without error
    // under the chosen device configuration.
    Add(1, 2, so);
}
/// <summary>
/// Create and run a simple graph that adds two numbers, then return the
/// devices used by the default session.
/// </summary>
/// <returns>The devices listed by the session that ran the graph.</returns>
private static Session.Device[] GetSessionDevices()
{
    // Enable on-demand GPU memory growth when CUDA is present.
    SessionOptions sessionOptions = new SessionOptions();
    if (TfInvoke.IsGoogleCudaEnabled)
    {
        Tensorflow.ConfigProto cfg = new Tensorflow.ConfigProto
        {
            GpuOptions = new Tensorflow.GPUOptions { AllowGrowth = true }
        };
        sessionOptions.SetConfig(cfg.ToProtobuf());
    }

    // Two scalar inputs for the addition.
    int a = 1;
    int b = 1;
    Tensor tensorA = new Tensor(a);
    Tensor tensorB = new Tensor(b);

    // Build the graph: sum = valA + valB.
    Graph graph = new Graph();
    Operation opA = graph.Placeholder(DataType.Int32, null, "valA");
    Operation opB = graph.Placeholder(DataType.Int32, null, "valB");
    Operation sumOp = graph.Add(opA, opB, "sum");

    using (Session session = new Session(graph, sessionOptions))
    {
        // Execute once so the session is fully realized, then list its devices.
        Tensor[] results = session.Run(
            new Output[] { opA, opB },
            new Tensor[] { tensorA, tensorB },
            new Output[] { sumOp });
        return session.ListDevices(null);
    }
}
/// <summary>
/// Create the Inception recognition page for either the retrained flower
/// model or the generic object-recognition model, and wire the image-loaded
/// handler that runs (and times) recognition.
/// </summary>
/// <param name="model">Selects flower recognition or generic Inception.</param>
public InceptionPage(Model model)
    : base()
{
    Title = model == Model.Flower ? "Flower Recognition" : "Object recognition (Inception)";
    _model = model;

    if (_inceptionGraph == null)
    {
        // Enable on-demand GPU memory growth when CUDA is present.
        SessionOptions so = new SessionOptions();
        if (TfInvoke.IsGoogleCudaEnabled)
        {
            Tensorflow.ConfigProto config = new Tensorflow.ConfigProto();
            config.GpuOptions = new Tensorflow.GPUOptions();
            config.GpuOptions.AllowGrowth = true;
            so.SetConfig(config.ToProtobuf());
        }

        _inceptionGraph = new Inception(null, so);
        _inceptionGraph.OnDownloadProgressChanged += onDownloadProgressChanged;
        _inceptionGraph.OnDownloadCompleted += onDownloadCompleted;
        _inceptionGraph.OnDownloadCompleted += (sender, e) => { OnButtonClicked(sender, e); };
    }

    OnImagesLoaded += (sender, image) =>
    {
#if !DEBUG
        try
#endif
        {
            SetMessage("Please wait...");
            SetImage();

            // The two models expect different input sizes and normalization.
            Tensor imageTensor;
            if (_model == Model.Flower)
            {
                imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile<float>(image[0], 299, 299, 0.0f, 1.0f / 255.0f, false, false);
            }
            else
            {
                imageTensor = Emgu.TF.Models.ImageIO.ReadTensorFromImageFile<float>(image[0], 224, 224, 128.0f, 1.0f);
            }

            Inception.RecognitionResult result;
            if (_coldSession)
            {
                // First run of the recognition graph: here we compile the graph and
                // initialize the session. This is expected to take much longer than
                // consecutive runs.
                result = _inceptionGraph.Recognize(imageTensor)[0];
                _coldSession = false;
            }

            // Time the execution of the graph after it is loaded. If performance
            // is not of interest, the following 3 lines can be skipped.
            Stopwatch sw = Stopwatch.StartNew();
            result = _inceptionGraph.Recognize(imageTensor)[0];
            sw.Stop();

            // FIX: the original message literal spanned a raw line break, which does
            // not compile in a non-verbatim C# string; it is now a single line.
            String msg = String.Format(
                "Object is {0} with {1}% probability. Recognized in {2} milliseconds.",
                result.Label, result.Probability * 100, sw.ElapsedMilliseconds);
            SetMessage(msg);

            var jpeg = Emgu.Models.NativeImageIO.ImageFileToJpeg(image[0]);
            SetImage(jpeg.Raw, jpeg.Width, jpeg.Height);
        }
#if !DEBUG
        catch (Exception excpt)
        {
            String msg = excpt.Message.Replace(System.Environment.NewLine, " ");
            SetMessage(msg);
        }
#endif
    };
}
/// <summary>
/// Create a session bound to <paramref name="graph"/> and install it as the
/// default session.
/// </summary>
/// <param name="graph">The graph the session runs on.</param>
/// <param name="config">Optional session configuration.</param>
/// <returns>The newly created default session.</returns>
public Session Session(Graph graph, ConfigProto config = null)
{
    var session = new Session(graph, config: config);
    return session.as_default();
}
/// <summary>
/// Create a session over the default graph with the given configuration and
/// install it as the default session.
/// </summary>
/// <param name="config">The session configuration.</param>
/// <returns>The newly created default session.</returns>
public Session Session(ConfigProto config)
{
    var session = new Session(null, config);
    return session.as_default();
}
/// <summary>
/// Create a session for the given graph with an empty target.
/// </summary>
/// <param name="g">The graph the session runs on.</param>
/// <param name="config">Optional session configuration; null uses defaults.</param>
/// <param name="s">Optional status object; null lets the base class supply one.</param>
public Session(Graph g, ConfigProto config = null, Status s = null) : base("", g, config, s) { }