/// <summary>
/// Disposes camera and TensorFlow resources when the window closes.
/// The try/finally guarantees the native TF handles are released even if
/// stopping the camera throws; null-conditional calls make the handler safe
/// when initialization never completed.
/// </summary>
/// <param name="sender">The event source (unused).</param>
/// <param name="e">Cancellation data for the closing event (unused).</param>
private void Window_Closing(object sender, CancelEventArgs e)
{
    try
    {
        // Stop capture first so no frame callback runs against a half-disposed session.
        camera?.Stop();
    }
    finally
    {
        camera?.Dispose();
        session?.Dispose();
        graph?.Dispose();
    }
}
/// <summary>
/// Releases the TensorFlow session. Null-conditional guards make this safe
/// when the session was never created, or when Dispose is called more than
/// once (the original threw NullReferenceException in both cases).
/// </summary>
public void Dispose()
{
    Console.WriteLine("Finalizing TF model resources");
    session?.CloseSession();
    session?.Dispose();
    Console.WriteLine("TF resources closed");
}
/// <summary>
/// Releases the TensorFlow session and graph, if they were created.
/// </summary>
public void Dispose()
{
    // Null-conditional calls replace the explicit null checks; the
    // effect is identical: dispose only what exists.
    session?.Dispose();
    graph?.Dispose();
}
/// <summary>
/// Standard dispose pattern: releases the managed TF session and graph
/// exactly once.
/// </summary>
/// <param name="disposing">
/// True when invoked from Dispose(); false when invoked from a finalizer,
/// in which case managed fields must not be touched.
/// </param>
protected virtual void Dispose(bool disposing)
{
    // Guard clause: subsequent calls are no-ops.
    if (_disposed)
    {
        return;
    }

    if (disposing)
    {
        // Managed disposables are only released on the explicit-dispose path.
        _session.Dispose();
        _graph.Dispose();
    }

    _disposed = true;
}
/// <summary>
/// Test teardown: closes and releases the session and graph, clearing both
/// fields so a repeated cleanup call is a no-op.
/// </summary>
public void TestCleanup()
{
    // Close/dispose only if the session exists, then drop the reference.
    // (Assigning null when already null is a harmless no-op, so the final
    // state matches the original if-guarded version on every path.)
    s?.CloseSession();
    s?.Dispose();
    s = null;

    g?.Dispose();
    g = null;
}
/// <summary>
/// Explicit IDisposable implementation: releases the session and graph and
/// nulls the fields so repeated disposal is harmless.
/// </summary>
void IDisposable.Dispose()
{
    // Dispose-if-present, then unconditionally clear the reference;
    // clearing an already-null field changes nothing, so behavior matches
    // the original null-checked version exactly.
    _session?.Dispose();
    _session = null;

    _graph?.Dispose();
    _graph = null;
}
/// <summary>
/// Closes the TensorFlow session (if live) and then deletes the temporary
/// saved-model folder.
/// Technically we shouldn't be calling this if disposing == false, since we're
/// running in finalizer and the GC doesn't guarantee ordering of finalization
/// of managed objects, but we have to make sure that the Session is closed
/// before deleting our temporary directory.
/// </summary>
private void Dispose(bool disposing)
{
    try
    {
        // BUG FIX: the old guard `Session?.Handle != IntPtr.Zero` is a lifted
        // comparison — when Session is null, `Session?.Handle` is (IntPtr?)null
        // and `null != IntPtr.Zero` evaluates to TRUE, so the body ran and
        // `Session.CloseSession()` threw NullReferenceException. Check for
        // null explicitly before touching the handle.
        if (Session != null && Session.Handle != IntPtr.Zero)
        {
            Session.CloseSession();
            Session.Dispose();
        }
    }
    finally
    {
        // Always attempt temp-folder cleanup, even if closing the session threw.
        if (!string.IsNullOrEmpty(_savedModelPath) && _isTemporarySavedModel)
        {
            TensorFlowUtils.DeleteFolderWithRetries(_host, _savedModelPath);
        }
    }
}
/// <summary>
/// Releases the TensorFlow resources. The session is disposed before the
/// graph because the session executes against the graph, so the graph should
/// outlive it (the original disposed the graph first, while the session still
/// referenced it). Null-conditional calls make the method safe when
/// construction never completed or Dispose runs twice.
/// </summary>
public void Dispose()
{
    _session?.Dispose();
    _graph?.Dispose();
}
// run the CNN
/// <summary>
/// Runs the CNN over the buffered accelerometer window (accelX/Y/Z) and
/// stores the index of the most confident activity class in <c>index</c>.
/// NOTE(review): builds and tears down a fresh TFGraph/TFSession on every
/// call, which is expensive if this runs per sample — consider caching.
/// </summary>
void evaluate()
{
    // only run CNN if we have enough accelerometer values
    if (accelX.Count == inputWidth)
    {
        // convert from list to tensor
        // if tensor is 1 under, add dummy last value
        int i;
        for (i = 0; i < accelX.Count; i++)
        {
            inputTensor[0, 0, i, 0] = accelX[i];
            test = inputTensor[0, 0, i, 0];
        }
        // NOTE(review): this guard can never fire — the enclosing `if`
        // guarantees accelX.Count == inputWidth, so the loop always exits
        // with i == inputWidth.
        if (i != inputWidth)
        {
            inputTensor[0, 0, inputWidth - 1, 0] = 0;
        }
        for (i = 0; i < accelY.Count; i++)
        {
            inputTensor[0, 0, i, 1] = accelY[i];
        }
        // NOTE(review): accelY.Count is NOT checked against inputWidth above;
        // if it can run short, only the final slot is zero-padded and earlier
        // slots keep stale values from the previous call — confirm intent.
        if (i != inputWidth)
        {
            inputTensor[0, 0, inputWidth - 1, 1] = 0;
        }
        for (i = 0; i < accelZ.Count; i++)
        {
            inputTensor[0, 0, i, 2] = accelZ[i];
        }
        // NOTE(review): same unchecked-length concern as accelY above.
        if (i != inputWidth)
        {
            inputTensor[0, 0, inputWidth - 1, 2] = 0;
        }
        // tensor output variable
        float[,] recurrentTensor;
        // create tensorflow model
        using (var graph = new TFGraph())
        {
            graph.Import(graphModel.bytes);
            var session = new TFSession(graph);
            var runner = session.GetRunner();
            // do input tensor list to array and make it one dimensional
            TFTensor input = inputTensor;
            // set up input tensor and input
            runner.AddInput(graph["input_placeholder_x"][0], input);
            // set up output tensor
            runner.Fetch(graph["output_node"][0]);
            // run model
            recurrentTensor = runner.Run()[0].GetValue() as float[, ];
            here = true;
            // dispose resources - keeps cnn from breaking down later
            // NOTE(review): session is not in a using block; if Run() throws,
            // session.Dispose() is skipped and the native session leaks.
            session.Dispose();
            graph.Dispose();
        }
        // find the most confident answer
        // NOTE(review): recurrentTensor is null here if GetValue() returned a
        // non-float[,] value (the `as` cast) — the indexer below would then
        // throw NullReferenceException.
        float highVal = 0;
        int highInd = -1;
        sum = 0f;
        // *MAKE SURE ACTIVITYINDEXCHOICES MATCHES THE NUMBER OF CHOICES*
        for (int j = 0; j < activityIndexChoices; j++)
        {
            confidence = recurrentTensor[0, j];
            if (highInd > -1)
            {
                if (recurrentTensor[0, j] > highVal)
                {
                    highVal = confidence;
                    highInd = j;
                }
            }
            else
            {
                // First iteration: seed the running maximum.
                highVal = confidence;
                highInd = j;
            }
            // debugging - sum should = 1 at the end
            sum += confidence;
        }
        // debugging
        test1 = recurrentTensor[0, 0];
        test2 = recurrentTensor[0, 1];
        test3 = recurrentTensor[0, 2];
        // used in movement to see if we should be moving
        index = highInd;
        countCNN++;
    }
}
/// <summary>
/// Releases the TensorFlow session and graph (when present) and drops the
/// cached labels.
/// </summary>
public void Close()
{
    // Explicit null checks in place of the null-conditional operator;
    // the disposal order and final state are unchanged.
    if (session != null)
    {
        session.Dispose();
    }
    if (graph != null)
    {
        graph.Dispose();
    }
    labels = null;
}
/// <summary>
/// Conversion method: reads the image file into a scalar string-valued
/// tensor and runs the normalization graph over it.
/// </summary>
/// <returns>The normalized image tensor produced by the graph.</returns>
public TFTensor 转换方法()
{
    // DecodeJpeg uses a scalar String-valued tensor as input.
    contents = File.ReadAllBytes(file);
    tensor = TFTensor.CreateString(contents);

    // Construct and execute the graph that normalizes this one image.
    // Cleanup: the large commented-out experiments were deleted; the
    // `catch { throw; }` (which added nothing) and the explicit
    // session.Dispose() in a finally block (redundant inside `using` of the
    // same session) were removed. Runtime behavior is unchanged.
    using (var session = new TFSession(graph1))
    {
        var runner = session.GetRunner();
        runner.AddInput(input, tensor);
        runner.Fetch(output);
        normalized = runner.Run();
        return normalized[0];
    }
}
//double[] retResult;
/// <summary>
/// Recognition method: converts the current image to a tensor, runs the
/// classification graph, and returns the index of the best-scoring label.
/// NOTE(review): mutates many fields (tensor, output, result, rshape,
/// bestIdx, best, p, jagged) — not safe for concurrent calls.
/// </summary>
/// <param name="ou">Receives the highest softmax score found.</param>
/// <returns>The index of the highest-scoring label.</returns>
public Int32 识别方法(out float ou)
{
    tensor = _图片转换成张量类.转换方法();
    // NOTE(review): probabilities11 is computed but never used — debugging
    // leftover? It still forces a jagged GetValue on the input tensor.
    var probabilities11 = ((float[][][][])tensor.GetValue(jagged: true))[0];
    //tensor = CreateTensorFromImageFile(临时图片路径);
    using (var session = new TFSession(graph))
    {
        try
        {
            var runner = session.GetRunner();
            //runner.AddInput(graph["x_input"][0], tensor).Fetch(graph["softmax_linear/softmax_linear"][0]);
            //runner.AddInput(graph["x_input"][0], tensor).Fetch(graph["softmax_z/softmax_z"][0]);
            // Feed the image tensor and fetch the softmax output node.
            runner.AddInput(graph["x_input"][0], tensor);
            runner.Fetch(graph["softmax_z/softmax_z"][0]);
            output = runner.Run();
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
            throw;
        }
        finally
        {
            // NOTE(review): redundant — the enclosing using block disposes
            // the session as well.
            session.Dispose();
            //session.CloseSession();
            //session.DeleteSession();
        }
    }
    //Random rd = new Random();
    //ou = rd.Next(100, 500);
    //return rd.Next(100, 500);
    result = output[0];
    rshape = result.Shape;
    //var probabilities1 = ((float[][])result.GetValue(jagged: true))[0];
    //var val1 = (float[,])result.GetValue(jagged: false);
    //var val2 = (float[])result.GetValue(jagged: true);
    // Sanity-check the output shape: expected [1, N] where N = label count.
    // NOTE(review): Environment.Exit(1) kills the whole process on a shape
    // mismatch — an exception would be friendlier to callers; confirm intent.
    if (result.NumDims != 2 || rshape[0] != 1)
    {
        var shape = "";
        foreach (var d in rshape)
        {
            shape += $"{d} ";
        }
        shape = shape.Trim();
        Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
        Environment.Exit(1);
    }
    // jagged is forced to true here, so the else branch below is dead code
    // unless this flag is changed elsewhere.
    jagged = true;
    //jagged = false;
    bestIdx = 0;
    p = 0;
    best = 0;
    if (jagged)
    {
        // Scan row 0 of the jagged result for the arg-max score.
        var probabilities = ((float[][])result.GetValue(jagged: true))[0];
        //double[] d = floatTodouble(probabilities);
        //double[] retResult = Softmax(d);
        for (int i = 0; i < probabilities.Length; i++)
        {
            if (probabilities[i] > best)
            {
                bestIdx = i;
                //best = probabilities[i];
                best = probabilities[i];
            }
        }
    }
    else
    {
        // Same arg-max over a rectangular result array.
        var val = (float[, ])result.GetValue(jagged: false);
        for (int i = 0; i < val.GetLength(1); i++)
        {
            if (val[0, i] > best)
            {
                bestIdx = i;
                best = val[0, i];
            }
        }
    }
    // Release references so the GC can reclaim the tensors.
    tensor = null;
    result = null;
    rshape = null;
    ou = best;
    return(bestIdx);
}
/// <summary>
/// Releases the TensorFlow graph and session, if present.
/// </summary>
public void Dispose()
{
    // Explicit null checks replace the null-conditional calls; the
    // disposal order (graph, then session) is preserved exactly.
    if (_graph != null)
    {
        _graph.Dispose();
    }
    if (_session != null)
    {
        _session.Dispose();
    }
}