/// <summary>
/// Create a new Status
/// </summary>
public Status()
{
    _ptr = TfInvoke.tfeNewStatus();
}
/// <summary>
/// Operation will only be added to graph when FinishOperation() is
/// called (assuming FinishOperation() does not return an error).
/// Graph must not be deleted until after FinishOperation() is called.
/// </summary>
/// <param name="opType">The operation type</param>
/// <param name="opName">The name of the operation</param>
/// <returns>A new operation description</returns>
public OperationDescription NewOperation(String opType, String opName)
{
    return(new OperationDescription(TfInvoke.tfeNewOperation(_ptr, opType, opName)));
}
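// A minimal usage sketch for NewOperation: building a Placeholder node on a graph.
// It assumes FinishOperation() on OperationDescription returns the finished Operation
// and that DataType exposes a Float member; both are illustrative assumptions, not
// guaranteed by this excerpt.
//
//   Graph graph = new Graph();
//   OperationDescription desc = graph.NewOperation("Placeholder", "input");
//   desc.SetAttr("dtype", DataType.Float);
//   Operation placeholder = desc.FinishOperation();  // node is added to the graph here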
/// <summary>
/// Write out a serialized representation of `graph` (as a GraphDef protocol
/// message).
/// </summary>
/// <param name="outputGraphDef">The buffer to store the GraphDef</param>
/// <param name="status">The status</param>
public void ToGraphDef(Buffer outputGraphDef, Status status = null)
{
    using (StatusChecker checker = new StatusChecker(status))
        TfInvoke.tfeGraphToGraphDef(_ptr, outputGraphDef, checker.Status);
}
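// A small sketch of serializing a graph; the resulting Buffer can then be handed to
// ImportGraphDef (shown later in this file) or persisted by the caller. The status
// argument is omitted here, relying on the default StatusChecker behavior.
//
//   Buffer graphDef = new Buffer();
//   graph.ToGraphDef(graphDef);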
/// <summary>
/// Set the prefix to be prepended to the names of nodes in `graph_def` that will
/// be imported into `graph`.
/// </summary>
/// <param name="prefix">The node prefix</param>
public void SetPrefix(String prefix)
{
    TfInvoke.tfeImportGraphDefOptionsSetPrefix(_ptr, prefix);
}
/// <summary>
/// Set any imported nodes with control input <paramref name="srcName"/> to have that input
/// replaced with <paramref name="dst"/>
/// </summary>
/// <param name="srcName">Refers to a node in the graph to be imported</param>
/// <param name="dst">References an operation already existing in the graph being imported into</param>
public void RemapControlDependency(String srcName, Operation dst)
{
    TfInvoke.tfeImportGraphDefOptionsRemapControlDependency(_ptr, srcName, dst);
}
/// <summary>
/// Create a Session from a SavedModel. If successful, populates the internal graph with the contents
/// of the Graph and with the MetaGraphDef of the loaded model.
/// </summary>
/// <param name="exportDir">Must be set to the path of the exported SavedModel.</param>
/// <param name="tags">Must include the set of tags used to identify one MetaGraphDef in the SavedModel. Could be "serve", "tpu", "gpu", "train" or other values.</param>
/// <param name="sessionOptions">Session options</param>
/// <param name="runOptions">If not null, a Buffer containing a serialized RunOptions protocol buffer</param>
/// <param name="status">The status</param>
public Session(
    String exportDir,
    String[] tags,
    SessionOptions sessionOptions = null,
    Buffer runOptions = null,
    Status status = null)
{
    _graph = new Graph();
    _graphNeedDispose = true;
    _metaGraphDef = new Buffer();

    IntPtr exportDirPtr = Marshal.StringToHGlobalAnsi(exportDir);
    IntPtr[] tagsNative;
    GCHandle tagsNativeHandle;
    IntPtr tagsNativePointer = IntPtr.Zero;
    if (tags != null)
    {
        tagsNative = new IntPtr[tags.Length];
        for (int i = 0; i < tags.Length; i++)
        {
            tagsNative[i] = Marshal.StringToHGlobalAnsi(tags[i]);
        }
        tagsNativeHandle = GCHandle.Alloc(tagsNative, GCHandleType.Pinned);
        tagsNativePointer = tagsNativeHandle.AddrOfPinnedObject();
    }
    else
    {
        tagsNativeHandle = new GCHandle();
        tagsNative = new IntPtr[0];
    }

    try
    {
        using (StatusChecker checker = new StatusChecker(status))
            _ptr = TfInvoke.tfeLoadSessionFromSavedModel(
                sessionOptions,
                runOptions,
                exportDirPtr,
                tagsNativePointer,
                tagsNative.Length,
                _graph,
                _metaGraphDef,
                checker.Status);
    }
    catch (Exception excpt)
    {
        Trace.WriteLine(excpt.Message);
        throw;
    }
    finally
    {
        Marshal.FreeHGlobal(exportDirPtr);
        if (tagsNativeHandle.IsAllocated)
        {
            tagsNativeHandle.Free();
        }
        for (int i = 0; i < tagsNative.Length; i++)
        {
            Marshal.FreeHGlobal(tagsNative[i]);
        }
    }
}
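// Loading a SavedModel with this constructor, as a minimal sketch. The directory path
// and the "serve" tag are illustrative values; the tag set must match a MetaGraphDef
// stored in the SavedModel. Accessing the populated graph through a Graph property is
// an assumption about the wrapper, not shown in this excerpt.
//
//   Session session = new Session("/path/to/saved_model", new String[] { "serve" });
//   Graph graph = session.Graph;  // assumed accessor for the graph populated above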
/// <summary>
/// Add an output in graph_def to be returned via the `return_outputs` output
/// parameter. If the output is remapped via an input
/// mapping, the corresponding existing tensor in graph will be returned.
/// </summary>
/// <param name="operName">The name of the operation</param>
/// <param name="index">The index</param>
public void AddReturnOutput(String operName, int index)
{
    TfInvoke.tfeImportGraphDefOptionsAddReturnOutput(_ptr, operName, index);
}
/// <summary>
/// Set a DataType value as an attribute
/// </summary>
/// <param name="attrName">The attribute name</param>
/// <param name="type">The type</param>
public void SetAttr(String attrName, DataType type)
{
    TfInvoke.tfeSetAttrType(_ptr, attrName, type);
}
/// <summary>
/// Set a Tensor as an attribute
/// </summary>
/// <param name="attrName">The name of the attribute</param>
/// <param name="tensor">The Tensor</param>
/// <param name="status">The status</param>
public void SetAttr(String attrName, Tensor tensor, Status status = null)
{
    using (StatusChecker checker = new StatusChecker(status))
        TfInvoke.tfeSetAttrTensor(_ptr, attrName, tensor, checker.Status);
}
/// <summary>
/// Set a floating point value as an attribute
/// </summary>
/// <param name="attrName">The attribute name</param>
/// <param name="value">The value</param>
public void SetAttr(String attrName, float value)
{
    TfInvoke.tfeSetAttrFloat(_ptr, attrName, value);
}
/// <summary>
/// Set a string value as an attribute
/// </summary>
/// <param name="attrName">The attribute name</param>
/// <param name="value">The value</param>
public void SetAttr(String attrName, String value)
{
    TfInvoke.tfeSetAttrString(_ptr, attrName, value, value.Length);
}
/// <summary>
/// Set a boolean value as an attribute
/// </summary>
/// <param name="attrName">The attribute name</param>
/// <param name="value">The value</param>
public void SetAttr(String attrName, bool value)
{
    TfInvoke.tfeSetAttrBool(_ptr, attrName, value);
}
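// The SetAttr overloads above map onto the typed attribute setters of the native API.
// A short sketch of configuring an OperationDescription; the op type, attribute names
// and values are illustrative only, and the DataType member name is an assumption:
//
//   OperationDescription desc = graph.NewOperation("RandomUniform", "rand");
//   desc.SetAttr("dtype", DataType.Float);   // DataType overload
//   desc.SetAttr("scale", 0.5f);             // float overload
//   desc.SetAttr("validate_shape", false);   // bool overload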
/// <summary>
/// Create a new empty TString
/// </summary>
public TString()
{
    _needDispose = true;
    _ptr = TfInvoke.tfeTStringCreate();
}
/// <summary>
/// Static constructor: caches the size of the native TString type.
/// </summary>
static TString()
{
    _sizeOfTString = TfInvoke.tfeTStringTypeSize();
}
/// <summary>
/// Close a session.
/// Contacts any other processes associated with the session, if applicable.
/// </summary>
/// <param name="status">The status</param>
public void Close(Status status = null)
{
    using (StatusChecker checker = new StatusChecker(status))
        TfInvoke.tfeCloseSession(_ptr, checker.Status);
}
/// <summary>
/// Specify the device
/// </summary>
/// <param name="device">The device name</param>
public void SetDevice(String device)
{
    TfInvoke.tfeSetDevice(_ptr, device);
}
/// <summary>
/// Run the graph associated with the session starting with the supplied inputs
/// (inputs[0,ninputs-1] with corresponding values in input_values[0,ninputs-1]).
/// </summary>
/// <param name="inputs">The input nodes</param>
/// <param name="inputValues">The input values</param>
/// <param name="outputs">The output nodes</param>
/// <param name="targetOperations">Optional target operations</param>
/// <param name="runOptions">
/// May be NULL, in which case it will be ignored; or
/// non-NULL, in which case it must point to a `TF_Buffer` containing the
/// serialized representation of a `RunOptions` protocol buffer.
/// </param>
/// <param name="runMetadata">
/// May be NULL, in which case it will be ignored; or
/// non-NULL, in which case it must point to an empty, freshly allocated
/// `TF_Buffer` that may be updated to contain the serialized representation
/// of a `RunMetadata` protocol buffer.
/// </param>
/// <param name="status">The status</param>
/// <returns>On success, the tensors corresponding to outputs[0,noutputs-1] are placed in the returned Tensors.</returns>
public Tensor[] Run(
    Output[] inputs,
    Tensor[] inputValues,
    Output[] outputs,
    Operation[] targetOperations = null,
    Buffer runOptions = null,
    Buffer runMetadata = null,
    Status status = null)
{
    IntPtr[] inputOps = Array.ConvertAll(inputs, i => i.Operation.Ptr);
    int[] inputIdx = Array.ConvertAll(inputs, i => i.Index);
    IntPtr[] inputTensors = Array.ConvertAll(inputValues, i => i.Ptr);
    GCHandle inputOpsHandle = GCHandle.Alloc(inputOps, GCHandleType.Pinned);
    GCHandle inputIdxHandle = GCHandle.Alloc(inputIdx, GCHandleType.Pinned);
    GCHandle inputTensorsHandle = GCHandle.Alloc(inputTensors, GCHandleType.Pinned);

    IntPtr[] outputOps = Array.ConvertAll(outputs, o => o.Operation.Ptr);
    int[] outputIdx = Array.ConvertAll(outputs, o => o.Index);
    IntPtr[] outputTensors = new IntPtr[outputs.Length];
    GCHandle outputOpsHandle = GCHandle.Alloc(outputOps, GCHandleType.Pinned);
    GCHandle outputIdxHandle = GCHandle.Alloc(outputIdx, GCHandleType.Pinned);
    GCHandle outputTensorsHandle = GCHandle.Alloc(outputTensors, GCHandleType.Pinned);

    IntPtr targetOpsPtr = IntPtr.Zero;
    int ntargets = 0;
    IntPtr[] targetOpsPtrArray = null;
    GCHandle targetOpsHandle = new GCHandle();
    if (targetOperations != null)
    {
        targetOpsPtrArray = Array.ConvertAll(targetOperations, o => o.Ptr);
        targetOpsHandle = GCHandle.Alloc(targetOpsPtrArray, GCHandleType.Pinned);
        targetOpsPtr = targetOpsHandle.AddrOfPinnedObject();
        ntargets = targetOperations.Length;
    }

    using (StatusChecker checker = new StatusChecker(status))
    {
        TfInvoke.tfeSessionRun(
            _ptr,
            runOptions,
            inputOpsHandle.AddrOfPinnedObject(),
            inputIdxHandle.AddrOfPinnedObject(),
            inputTensorsHandle.AddrOfPinnedObject(),
            inputs.Length,
            outputOpsHandle.AddrOfPinnedObject(),
            outputIdxHandle.AddrOfPinnedObject(),
            outputTensorsHandle.AddrOfPinnedObject(),
            outputs.Length,
            targetOpsPtr,
            ntargets,
            runMetadata,
            checker.Status);
    }

    inputOpsHandle.Free();
    inputIdxHandle.Free();
    inputTensorsHandle.Free();
    if (targetOperations != null)
    {
        targetOpsHandle.Free();
    }
    outputOpsHandle.Free();
    outputIdxHandle.Free();
    outputTensorsHandle.Free();

    return(Array.ConvertAll(outputTensors, t => new Tensor(t)));
}
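// A hedged end-to-end sketch of Run. It assumes Output has a constructor taking an
// Operation and an output index, and that the session, the input/output operations and
// the input tensor have already been created (Tensor construction is not part of this
// excerpt):
//
//   Output[] feeds = new Output[] { new Output(inputOp, 0) };
//   Output[] fetches = new Output[] { new Output(outputOp, 0) };
//   Tensor[] results = session.Run(feeds, new Tensor[] { inputTensor }, fetches);
//   Tensor prediction = results[0];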
/// <summary>
/// Call once per control input to this Operation description
/// </summary>
/// <param name="input">The control input</param>
public void AddControlInput(Operation input)
{
    TfInvoke.tfeAddControlInput(_ptr, input);
}
/// <summary>
/// Create a new empty buffer
/// </summary>
public Buffer()
{
    _ptr = TfInvoke.tfeNewBuffer();
}
/// <summary>
/// Request this operation be co-located on the device where <paramref name="op"/>
/// is placed.
/// </summary>
/// <param name="op">The other operation</param>
public void ColocateWith(Operation op)
{
    TfInvoke.tfeColocateWith(_ptr, op);
}
/// <summary>
/// Create an empty ImportGraphDefOptions
/// </summary>
public ImportGraphDefOptions()
{
    _ptr = TfInvoke.tfeNewImportGraphDefOptions();
}
/// <summary>
/// Add the input to this operation description
/// </summary>
/// <param name="input">The input to the operation</param>
public void AddInput(Output input)
{
    TfInvoke.tfeAddInput(_ptr, input.Operation, input.Index);
}
/// <summary>
/// Set the execution device for nodes in GraphDef.
/// Only applies to nodes where a device was not already explicitly specified.
/// </summary>
/// <param name="device">The device name</param>
public void SetDefaultDevice(String device)
{
    TfInvoke.tfeImportGraphDefOptionsSetDefaultDevice(_ptr, device);
}
/// <summary>
/// Set a long value as an attribute
/// </summary>
/// <param name="attrName">The attribute name</param>
/// <param name="value">The value</param>
public void SetAttr(String attrName, long value)
{
    TfInvoke.tfeSetAttrInt(_ptr, attrName, value);
}
/// <summary>
/// Cause the imported graph to have a control dependency on <paramref name="oper"/>
/// </summary>
/// <param name="oper">The operation that the graph will have a control dependency on. Should exist in the graph being imported into.</param>
public void AddControlDependency(Operation oper)
{
    TfInvoke.tfeImportGraphDefOptionsAddControlDependency(_ptr, oper);
}
/// <summary>
/// Create a single element tensor
/// </summary>
/// <param name="dataType">The Type of the Tensor</param>
/// <param name="sizeInBytes">The size in bytes</param>
private Tensor(DataType dataType, int sizeInBytes)
{
    _ptr = TfInvoke.tfeAllocateTensor(dataType, IntPtr.Zero, 0, sizeInBytes);
}
/// <summary>
/// Iterate through the operations of a graph.
/// </summary>
/// <param name="pos">The position pointer that can be used to iterate through the operations of this graph. Use IntPtr.Zero to get the first operation</param>
/// <returns>The next operation from the position</returns>
public Operation NextOperation(ref IntPtr pos)
{
    return(new Operation(TfInvoke.tfeGraphNextOperation(_ptr, ref pos)));
}
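// Iterating every operation in a graph with NextOperation, a sketch that assumes the
// returned Operation wraps a zero pointer (Ptr == IntPtr.Zero) once the end of the graph
// is reached, mirroring the native iterator returning null:
//
//   IntPtr pos = IntPtr.Zero;
//   Operation op;
//   while ((op = graph.NextOperation(ref pos)).Ptr != IntPtr.Zero)
//   {
//       // inspect op here
//   }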
/// <summary>
/// Import the graph serialized in <paramref name="graphDef"/> into the current graph.
/// Convenience function for when no return outputs have been added.
/// </summary>
/// <param name="graphDef">The GraphDef to be imported</param>
/// <param name="options">The import options</param>
/// <param name="status">The status</param>
public void ImportGraphDef(Buffer graphDef, ImportGraphDefOptions options, Status status = null)
{
    using (StatusChecker checker = new StatusChecker(status))
        TfInvoke.tfeGraphImportGraphDef(_ptr, graphDef, options, checker.Status);
}
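// Re-importing a serialized graph under a name prefix, using only the members shown in
// this file; the "imported" prefix and the srcGraph/dstGraph variables are illustrative:
//
//   Buffer graphDef = new Buffer();
//   srcGraph.ToGraphDef(graphDef);
//   ImportGraphDefOptions options = new ImportGraphDefOptions();
//   options.SetPrefix("imported");
//   dstGraph.ImportGraphDef(graphDef, options);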
/// <summary>
/// Create a new Graph
/// </summary>
public Graph()
{
    _ptr = TfInvoke.tfeNewGraph();
}
/// <summary>
/// Write out a serialized representation of this Function (as a FunctionDef protocol
/// message).
/// </summary>
/// <param name="outputFuncDef">A serialized representation of this Function (as a FunctionDef protocol message)</param>
/// <param name="status">The status</param>
public void ToFunctionDef(Buffer outputFuncDef, Status status = null)
{
    using (StatusChecker checker = new StatusChecker(status))
        TfInvoke.tfeFunctionToFunctionDef(_ptr, outputFuncDef, checker.Status);
}