/// <summary>
/// Load TensorFlow model into memory. This is the convenience method that allows the model to be loaded once and subsequently use it
/// for querying schema and creation of <see cref="TensorFlowEstimator"/> using <see cref="TensorFlowModel.ScoreTensorFlowModel(string, string, bool)"/>.
/// </summary>
/// <param name="catalog">The transform's catalog.</param>
/// <param name="modelLocation">Location of the TensorFlow model.</param>
public static TensorFlowModel LoadTensorFlowModel(this ModelOperationsCatalog catalog, string modelLocation)
{
    var env = CatalogUtils.GetEnvironment(catalog);
    return TensorFlowUtils.LoadTensorFlowModel(env, modelLocation);
}
/// <summary>
/// Get <see cref="Schema"/> for only those nodes which are marked "Placeholder" in the TensorFlow model,
/// i.e. the graph's inputs. Convenient for exploring the model input(s) when the TensorFlow graph is very large.
/// </summary>
public Schema GetInputSchema()
    => TensorFlowUtils.GetModelSchema(_env, Session.Graph, "Placeholder");
/// <summary>
/// Example use of the TensorFlow sentiment classification model.
/// </summary>
public static void Example()
{
    // Download the pre-trained TensorFlow sentiment model and get its on-disk location.
    string modelLocation = SamplesUtils.DatasetUtils.DownloadTensorFlowSentimentModel();

    var mlContext = new MLContext();

    // A single hand-written IMDB movie review used as the in-memory input data.
    var data = new[]
    {
        new IMDBSentiment()
        {
            Sentiment_Text = "this film was just brilliant casting location scenery story direction " +
            "everyone's really suited the part they played and you could just imagine being there robert " +
            "is an amazing actor and now the same being director father came from the same scottish " +
            "island as myself so i loved the fact there was a real connection with this film the witty " +
            "remarks throughout the film were great it was just brilliant so much that i bought the " +
            "film as soon as it was released for and would recommend it to everyone to watch and the " +
            "fly fishing was amazing really cried at the end it was so sad and you know what they say " +
            "if you cry at a film it must have been good and this definitely was also to the two " +
            "little boy's that played the of norman and paul they were just brilliant children are " +
            "often left out of the list i think because the stars that play them all grown up are " +
            "such a big profile for the whole film but these children are amazing and should be praised " +
            "for what they have done don't you think the whole story was so lovely because it was true " +
            "and was someone's life after all that was shared with us all"
        }
    };
    var dataView = mlContext.Data.ReadFromEnumerable(data);

    // This is the dictionary to convert words into the integer indexes.
    var lookupMap = mlContext.Data.ReadFromTextFile(Path.Combine(modelLocation, "imdb_word_index.csv"),
        columns: new[]
        {
            new TextLoader.Column("Words", DataKind.TX, 0),
            new TextLoader.Column("Ids", DataKind.I4, 1),
        },
        separatorChar: ','
    );

    // Load the TensorFlow model once.
    // - Use it for querying the schema for input and output in the model.
    // - Use it for prediction in the pipeline.
    var modelInfo = TensorFlowUtils.LoadTensorFlowModel(mlContext, modelLocation);
    var schema = modelInfo.GetModelSchema();

    // Print the name, item type, and shape of the model's input and output nodes.
    var featuresType = (VectorType)schema["Features"].Type;
    Console.WriteLine("Name: {0}, Type: {1}, Shape: (-1, {2})", "Features", featuresType.ItemType.RawType, featuresType.Dimensions[0]);
    var predictionType = (VectorType)schema["Prediction/Softmax"].Type;
    Console.WriteLine("Name: {0}, Type: {1}, Shape: (-1, {2})", "Prediction/Softmax", predictionType.ItemType.RawType, predictionType.Dimensions[0]);

    // The model expects the input feature vector to be a fixed length vector.
    // In this sample, CustomMappingEstimator is used to resize variable length vector to fixed length vector.
    // The following ML.NET pipeline
    // 1. tokenizes the string into words,
    // 2. maps each word to an integer which is an index in the dictionary ('lookupMap'),
    // 3. resizes the integer vector to a fixed length vector using CustomMappingEstimator ('ResizeFeaturesAction'),
    // 4. passes the data to TensorFlow for scoring,
    // 5. retrieves the 'Prediction' from TensorFlow and puts it into the ML.NET pipeline.
    Action<IMDBSentiment, IntermediateFeatures> ResizeFeaturesAction = (i, j) =>
    {
        j.Sentiment_Text = i.Sentiment_Text;
        // NOTE: 'Lenght'/'Lenth' spellings below are part of identifiers declared elsewhere and cannot be changed here.
        var features = i.VariableLenghtFeatures;
        // Pad (or truncate) the token-id vector to the fixed length the model expects.
        Array.Resize(ref features, MaxSentenceLenth);
        j.Features = features;
    };

    var engine = mlContext.Transforms.Text.TokenizeWords("TokenizedWords", "Sentiment_Text")
        .Append(mlContext.Transforms.Conversion.ValueMap(lookupMap, "Words", "Ids", new[] { ("VariableLenghtFeatures", "TokenizedWords") }))
/// <summary>
/// Get <see cref="DataViewSchema"/> for the complete model. Every node in the TensorFlow model will be
/// included in the <see cref="DataViewSchema"/> object.
/// </summary>
public DataViewSchema GetModelSchema()
    => TensorFlowUtils.GetModelSchema(_env, Session.Graph);
/// <summary>
/// Get <see cref="Schema"/> for the complete model. Every node in the TensorFlow model will be
/// included in the <see cref="Schema"/> object.
/// </summary>
internal Schema GetModelSchema()
    => TensorFlowUtils.GetModelSchema(_env, Session.Graph);
// End-to-end test: loads a CIFAR SavedModel, builds an image pipeline (load -> resize -> extract pixels)
// sized from the model's own input schema, scores three images, and checks each output is a 10-class vector.
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // TensorFlow is 64-bit only
public void TensorFlowTransformCifarSavedModel()
{
    var model_location = "cifar_saved_model";
    var env = new MLContext();
    var tensorFlowModel = TensorFlowUtils.LoadTensorFlowModel(env, model_location);

    // Read the expected image dimensions from the model's "Input" placeholder
    // so the resize step below matches what the graph requires.
    var schema = tensorFlowModel.GetInputSchema();
    Assert.True(schema.TryGetColumnIndex("Input", out int column));
    var type = (VectorType)schema.GetColumnType(column);
    var imageHeight = type.Dimensions[0];
    var imageWidth = type.Dimensions[1];

    // Load the list of image paths/names from the test data file.
    var dataFile = GetDataPath("images/images.tsv");
    var imageFolder = Path.GetDirectoryName(dataFile);
    var data = TextLoader.Create(env, new TextLoader.Arguments()
    {
        Column = new[]
        {
            new TextLoader.Column("ImagePath", DataKind.TX, 0),
            new TextLoader.Column("Name", DataKind.TX, 1),
        }
    }, new MultiFileSource(dataFile));

    // Pipeline: load images from disk ...
    var images = ImageLoaderTransform.Create(env, new ImageLoaderTransform.Arguments()
    {
        Column = new ImageLoaderTransform.Column[1]
        {
            new ImageLoaderTransform.Column() { Source = "ImagePath", Name = "ImageReal" }
        },
        ImageFolder = imageFolder
    }, data);
    // ... crop/resize to the model's expected height and width ...
    var cropped = ImageResizerTransform.Create(env, new ImageResizerTransform.Arguments()
    {
        Column = new ImageResizerTransform.Column[1]
        {
            new ImageResizerTransform.Column() { Source = "ImageReal", Name = "ImageCropped", ImageHeight = imageHeight, ImageWidth = imageWidth, Resizing = ImageResizerTransform.ResizingKind.IsoCrop }
        }
    }, images);
    // ... and flatten the pixels (no alpha, interleaved channels) into the "Input" column.
    var pixels = ImagePixelExtractorTransform.Create(env, new ImagePixelExtractorTransform.Arguments()
    {
        Column = new ImagePixelExtractorTransform.Column[1]
        {
            new ImagePixelExtractorTransform.Column() { Source = "ImageCropped", Name = "Input", UseAlpha = false, InterleaveArgb = true }
        }
    }, cropped);

    // Score with the TensorFlow model: "Input" -> "Output".
    IDataView trans = TensorFlowTransform.Create(env, pixels, tensorFlowModel, new[] { "Output" }, new[] { "Input" });

    // Iterate the output column: expect a length-10 score vector per row, 3 rows total.
    trans.Schema.TryGetColumnIndex("Output", out int output);
    using (var cursor = trans.GetRowCursor(col => col == output))
    {
        var buffer = default(VBuffer<float>);
        var getter = cursor.GetGetter<VBuffer<float>>(output);
        var numRows = 0;
        while (cursor.MoveNext())
        {
            getter(ref buffer);
            Assert.Equal(10, buffer.Length);
            numRows += 1;
        }
        Assert.Equal(3, numRows);
    }
}
// Verifies the schema extracted from a frozen MNIST model: column count, per-node vector dimensions,
// and the TensorFlow operator-type / upstream-operator metadata attached to each column.
// Then checks a small matmul model whose nodes are named 'a', 'b', 'c', ... each with shape [2,2].
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // TensorFlow is 64-bit only
public void TensorFlowInputsOutputsSchemaTest()
{
    var mlContext = new MLContext(seed: 1, conc: 1);
    var model_location = "mnist_model/frozen_saved_model.pb";
    var schema = TensorFlowUtils.GetModelSchema(mlContext, model_location);
    Assert.Equal(86, schema.Count);

    // "Placeholder" is the graph input: 28x28, no upstream operators.
    Assert.True(schema.TryGetColumnIndex("Placeholder", out int col));
    var type = (VectorType)schema[col].Type;
    Assert.Equal(2, type.Dimensions.Length);
    Assert.Equal(28, type.Dimensions[0]);
    Assert.Equal(28, type.Dimensions[1]);
    var metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;
    Assert.NotNull(metadataType);
    Assert.True(metadataType is TextType);
    ReadOnlyMemory<char> opType = default;
    schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
    Assert.Equal("Placeholder", opType.ToString());
    // Inputs have no upstream operators, so the metadata column must be absent.
    metadataType = schema[col].Metadata.Schema.GetColumnOrNull(TensorFlowUtils.TensorflowUpstreamOperatorsKind)?.Type;
    Assert.Null(metadataType);

    // Conv kernel read: 5x5x1x32, produced by an Identity op fed by "conv2d/kernel".
    Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D/ReadVariableOp", out col));
    type = (VectorType)schema[col].Type;
    Assert.Equal(new[] { 5, 5, 1, 32 }, type.Dimensions);
    metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;
    Assert.NotNull(metadataType);
    Assert.True(metadataType is TextType);
    schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
    Assert.Equal("Identity", opType.ToString());
    metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowUpstreamOperatorsKind].Type;
    Assert.NotNull(metadataType);
    VBuffer<ReadOnlyMemory<char>> inputOps = default;
    schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowUpstreamOperatorsKind, ref inputOps);
    Assert.Equal(1, inputOps.Length);
    Assert.Equal("conv2d/kernel", inputOps.GetValues()[0].ToString());

    // Conv output: 28x28x32, a Conv2D op with two upstream operators (input reshape and kernel read).
    Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D", out col));
    type = (VectorType)schema[col].Type;
    Assert.Equal(new[] { 28, 28, 32 }, type.Dimensions);
    metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;
    Assert.NotNull(metadataType);
    Assert.True(metadataType is TextType);
    schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
    Assert.Equal("Conv2D", opType.ToString());
    metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowUpstreamOperatorsKind].Type;
    Assert.NotNull(metadataType);
    schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowUpstreamOperatorsKind, ref inputOps);
    Assert.Equal(2, inputOps.Length);
    Assert.Equal("reshape/Reshape", inputOps.GetValues()[0].ToString());
    Assert.Equal("conv2d/Conv2D/ReadVariableOp", inputOps.GetValues()[1].ToString());

    // Final Softmax output: 10 classes, one upstream operator (the last dense layer's bias add).
    Assert.True(schema.TryGetColumnIndex("Softmax", out col));
    type = (VectorType)schema[col].Type;
    Assert.Equal(new[] { 10 }, type.Dimensions);
    metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;
    Assert.NotNull(metadataType);
    Assert.True(metadataType is TextType);
    schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
    Assert.Equal("Softmax", opType.ToString());
    metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowUpstreamOperatorsKind].Type;
    Assert.NotNull(metadataType);
    schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowUpstreamOperatorsKind, ref inputOps);
    Assert.Equal(1, inputOps.Length);
    Assert.Equal("sequential/dense_1/BiasAdd", inputOps.GetValues()[0].ToString());

    // Second model: every node is named with consecutive letters starting at 'a', each shaped [2,2].
    model_location = "model_matmul/frozen_saved_model.pb";
    schema = TensorFlowUtils.GetModelSchema(mlContext, model_location);
    char name = 'a';
    for (int i = 0; i < schema.Count; i++)
    {
        Assert.Equal(name.ToString(), schema[i].Name);
        type = (VectorType)schema[i].Type;
        Assert.Equal(new[] { 2, 2 }, type.Dimensions);
        name++;
    }
}
/// <summary>
/// Constructs the transform over an already-open TensorFlow session: validates that every requested
/// input and output column corresponds to a node in the graph, then caches the TensorFlow
/// types/shapes for the inputs and the ML.NET output column types for the outputs.
/// </summary>
/// <param name="env">Host environment used for registration and error reporting.</param>
/// <param name="session">Open TensorFlow session whose graph the columns are resolved against.</param>
/// <param name="inputs">Graph node names to feed; must exist in the graph and have supported types.</param>
/// <param name="outputs">Graph node names to fetch; must exist in the graph and be unique.</param>
private TensorFlowTransform(IHostEnvironment env, TFSession session, string[] inputs, string[] outputs)
{
    Contracts.CheckValue(env, nameof(env));
    _host = env.Register(nameof(RegistrationName));
    _host.CheckValue(session, nameof(session));
    _host.CheckNonEmpty(inputs, nameof(inputs));
    _host.CheckNonEmpty(outputs, nameof(outputs));
    Session = session;

    // Validate each input: it must name an existing graph node whose TF type ML.NET can convert.
    foreach (var input in inputs)
    {
        _host.CheckNonWhiteSpace(input, nameof(inputs));
        if (Session.Graph[input] == null)
        {
            throw _host.ExceptParam(nameof(inputs), $"Input column '{input}' does not exist in the model");
        }
        var tfInput = new TFOutput(Session.Graph[input]);
        if (!TensorFlowUtils.IsTypeSupported(tfInput.OutputType))
        {
            throw _host.ExceptParam(nameof(session), $"Input type '{tfInput.OutputType}' of input column '{input}' is not supported in TensorFlow");
        }
    }

    // Validate each output: must exist in the graph and not be requested twice.
    var newNames = new HashSet<string>();
    foreach (var output in outputs)
    {
        _host.CheckNonWhiteSpace(output, nameof(outputs));
        if (!newNames.Add(output))
        {
            throw _host.ExceptParam(nameof(outputs), $"Output column '{output}' specified multiple times");
        }
        if (Session.Graph[output] == null)
        {
            throw _host.ExceptParam(nameof(outputs), $"Output column '{output}' does not exist in the model");
        }
    }

    // Cache input TF types and shapes. A leading dimension of -1 (unknown batch) is replaced by
    // BatchSize (a constant declared elsewhere in this class; value not visible in this chunk).
    Inputs = inputs;
    TFInputTypes = new TFDataType[Inputs.Length];
    TFInputShapes = new TFShape[Inputs.Length];
    for (int i = 0; i < Inputs.Length; i++)
    {
        var tfInput = new TFOutput(Graph[Inputs[i]]);
        TFInputTypes[i] = tfInput.OutputType;
        TFInputShapes[i] = Graph.GetTensorShape(tfInput);
        // NumDimensions == -1 means the shape is entirely unknown; leave it as-is in that case.
        if (TFInputShapes[i].NumDimensions != -1)
        {
            var newShape = new long[TFInputShapes[i].NumDimensions];
            newShape[0] = TFInputShapes[i][0] == -1 ? BatchSize : TFInputShapes[i][0];
            for (int j = 1; j < TFInputShapes[i].NumDimensions; j++)
            {
                newShape[j] = TFInputShapes[i][j];
            }
            TFInputShapes[i] = new TFShape(newShape);
        }
    }

    // Cache output TF types and build the ML.NET vector types for the output columns.
    // When the leading output dimension is -1 (unknown batch), the first BatchSize dimensions are
    // skipped so the column type describes a single row; a dimensionless shape becomes a single
    // variable-length dimension ({ 0 }).
    Outputs = outputs;
    OutputTypes = new ColumnType[Outputs.Length];
    TFOutputTypes = new TFDataType[Outputs.Length];
    for (int i = 0; i < Outputs.Length; i++)
    {
        var tfOutput = new TFOutput(Graph[Outputs[i]]);
        var shape = Graph.GetTensorShape(tfOutput);
        int[] dims = shape.NumDimensions > 0 ? shape.ToIntArray().Skip(shape[0] == -1 ? BatchSize : 0).ToArray() : new[] { 0 };
        var type = TensorFlowUtils.Tf2MlNetType(tfOutput.OutputType);
        OutputTypes[i] = new VectorType(type, dims);
        TFOutputTypes[i] = tfOutput.OutputType;
    }
}
/// <summary>
/// Builds the row-to-row mapper for the transform: resolves each model input against the input
/// schema, checks its item type against the TensorFlow type, and computes a fully specified
/// (no -1 dimensions) TensorFlow shape for every input column.
/// </summary>
/// <param name="env">Host environment used for registration and error reporting.</param>
/// <param name="parent">Owning transform supplying input names, TF types, and TF shapes.</param>
/// <param name="inputSchema">Schema of the incoming data; must contain every model input column.</param>
public Mapper(IHostEnvironment env, TensorFlowTransform parent, ISchema inputSchema)
{
    Contracts.CheckValue(env, nameof(env));
    _host = env.Register(nameof(Mapper));
    _host.CheckValue(inputSchema, nameof(inputSchema));
    _host.CheckValue(parent, nameof(parent));
    _parent = parent;
    _schema = inputSchema;
    _inputColIndices = new int[_parent.Inputs.Length];
    _isInputVector = new bool[_parent.Inputs.Length];
    _fullySpecifiedShapes = new TFShape[_parent.Inputs.Length];
    for (int i = 0; i < _parent.Inputs.Length; i++)
    {
        // Each model input must be present in the input schema...
        if (!inputSchema.TryGetColumnIndex(_parent.Inputs[i], out _inputColIndices[i]))
        {
            throw _host.Except($"Column {_parent.Inputs[i]} doesn't exist");
        }
        var type = inputSchema.GetColumnType(_inputColIndices[i]);
        _isInputVector[i] = type.IsVector;
        // ...and its item type must match the ML.NET equivalent of the model's TF input type.
        var expectedType = TensorFlowUtils.Tf2MlNetType(_parent.TFInputTypes[i]);
        if (type.ItemType != expectedType)
        {
            throw _host.ExceptSchemaMismatch(nameof(inputSchema), "input", _parent.Inputs[i], expectedType.ToString(), type.ToString());
        }
        var originalShape = _parent.TFInputShapes[i];
        var shape = originalShape.ToIntArray();

        // Column dimensions with a leading batch dimension of 1 prepended: [1, d0, d1, ...].
        var colTypeDims = Enumerable.Range(0, type.AsVector.DimCount + 1).Select(d => d == 0 ? 1 : (long)type.AsVector.GetDim(d - 1)).ToArray();
        if (shape == null)
        {
            // Model shape is entirely unknown; take the shape directly from the column type.
            _fullySpecifiedShapes[i] = new TFShape(colTypeDims);
        }
        else if (type.AsVector.DimCount == 1)
        {
            // If the column is one dimension we make sure that the total size of the TF shape matches.
            // Compute the total size of the known dimensions of the shape.
            int valCount = shape.Where(x => x > 0).Aggregate((x, y) => x * y);
            // The column length should be divisible by this, so that the other dimensions can be integral.
            if (type.ValueCount % valCount != 0)
            {
                throw Contracts.Except($"Input shape mismatch: Input '{_parent.Inputs[i]}' has shape {originalShape.ToString()}, but input data is of length {type.ValueCount}.");
            }
            // If the shape is multi-dimensional, we should be able to create the length of the vector by plugging
            // in a single value for the unknown shapes. E.g., if the shape is [?,?,3], then there should exist a value
            // d such that d*d*3 is equal to the length of the input column.
            var d = originalShape.NumDimensions > 2 ? Math.Pow(type.ValueCount / valCount, 1.0 / (originalShape.NumDimensions - 2)) : 1;
            if (originalShape.NumDimensions > 2 && d - (int)d != 0)
            {
                throw Contracts.Except($"Input shape mismatch: Input '{_parent.Inputs[i]}' has shape {originalShape.ToString()}, but input data is of length {type.ValueCount}.");
            }
            // Fill in the unknown dimensions.
            var l = new long[originalShape.NumDimensions];
            for (int ishape = 0; ishape < originalShape.NumDimensions; ishape++)
            {
                l[ishape] = originalShape[ishape] == -1 ? (int)d : originalShape[ishape];
            }
            _fullySpecifiedShapes[i] = new TFShape(l);
        }
        else
        {
            // Multi-dimensional column: every known model dimension must agree with the column's
            // dimensions (colTypeDims includes the leading batch-1 entry).
            if (shape.Select((dim, j) => dim != -1 && dim != colTypeDims[j]).Any(b => b))
            {
                throw Contracts.Except($"Input shape mismatch: Input '{_parent.Inputs[i]}' has shape {originalShape.ToString()}, but input data is {type.AsVector.ToString()}.");
            }
            // Fill in the unknown dimensions.
            var l = new long[originalShape.NumDimensions];
            for (int ishape = 0; ishape < originalShape.NumDimensions; ishape++)
            {
                l[ishape] = originalShape[ishape] == -1 ? colTypeDims[ishape] : originalShape[ishape];
            }
            _fullySpecifiedShapes[i] = new TFShape(l);
        }
    }
}
// Factory method for SignatureLoadModel.
//
// Deserializes a TensorFlowTransform that was previously saved. Frozen models are re-created
// directly from the serialized graph bytes; SavedModels are unpacked into a fresh temp directory
// (which is cleaned up if anything fails) and a session is opened over that directory.
//
// Fix: the result of TryLoadBinaryStream("TFSavedModel", ...) was previously assigned to an unused
// local and never checked, silently ignoring a failed/missing stream. It is now checked and throws
// ExceptDecode, consistent with the frozen-model branch.
private static TensorFlowTransform Create(IHostEnvironment env, ModelLoadContext ctx)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(ctx, nameof(ctx));
    ctx.CheckAtModel(GetVersionInfo());

    // *** Binary format ***
    // byte: indicator for frozen models
    // stream: tensorFlow model.
    // int: number of input columns
    // for each input column
    //   int: id of int column name
    // int: number of output columns
    // for each output column
    //   int: id of output column name
    GetModelInfo(env, ctx, out string[] inputs, out string[] outputs, out bool isFrozen);

    if (isFrozen)
    {
        // Frozen model: the whole graph is a single byte stream loaded straight into a session.
        byte[] modelBytes = null;
        if (!ctx.TryLoadBinaryStream("TFModel", r => modelBytes = r.ReadByteArray()))
        {
            throw env.ExceptDecode();
        }
        return (new TensorFlowTransform(env, TensorFlowUtils.LoadTFSession(env, modelBytes), inputs, outputs, null, false));
    }

    // SavedModel: unpack the serialized directory tree into a unique temp folder.
    var tempDirPath = Path.GetFullPath(Path.Combine(Path.GetTempPath(), RegistrationName + "_" + Guid.NewGuid()));
    TensorFlowUtils.CreateFolderWithAclIfNotExists(env, tempDirPath);
    try
    {
        if (!ctx.TryLoadBinaryStream("TFSavedModel", br =>
        {
            // Stream layout: file count, then for each file its relative path, length, and raw bytes.
            int count = br.ReadInt32();
            for (int n = 0; n < count; n++)
            {
                string relativeFile = br.ReadString();
                long fileLength = br.ReadInt64();
                string fullFilePath = Path.Combine(tempDirPath, relativeFile);
                string fullFileDir = Path.GetDirectoryName(fullFilePath);
                if (fullFileDir != tempDirPath)
                {
                    TensorFlowUtils.CreateFolderWithAclIfNotExists(env, fullFileDir);
                }
                using (var fs = new FileStream(fullFilePath, FileMode.Create, FileAccess.Write))
                {
                    long actualRead = br.BaseStream.CopyRange(fs, fileLength);
                    env.Assert(actualRead == fileLength);
                }
            }
        }))
        {
            throw env.ExceptDecode();
        }
        return (new TensorFlowTransform(env, TensorFlowUtils.GetSession(env, tempDirPath), inputs, outputs, tempDirPath, true));
    }
    catch (Exception)
    {
        // Don't leave partially-extracted model files behind on failure.
        TensorFlowUtils.DeleteFolderWithRetries(env, tempDirPath);
        throw;
    }
}
// Older (ISchema-based) variant of the schema test: verifies node dimensions and the
// OpType/InputOps metadata for a frozen MNIST model, then checks a small matmul model whose
// nodes are named 'a', 'b', 'c', ... each with shape [2,2].
public void TensorFlowInputsOutputsSchemaTest()
{
    using (var env = new ConsoleEnvironment(seed: 1, conc: 1))
    {
        var model_location = "mnist_model/frozen_saved_model.pb";
        var schema = TensorFlowUtils.GetModelSchema(env, model_location);
        Assert.Equal(54, schema.ColumnCount);

        // "Placeholder" is the graph input: 28x28, with no upstream operators.
        Assert.True(schema.TryGetColumnIndex("Placeholder", out int col));
        var type = schema.GetColumnType(col).AsVector;
        Assert.Equal(2, type.DimCount);
        Assert.Equal(28, type.GetDim(0));
        Assert.Equal(28, type.GetDim(1));
        var metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
        Assert.NotNull(metadataType);
        Assert.True(metadataType.IsText);
        ReadOnlyMemory<char> opType = default;
        schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
        Assert.Equal("Placeholder", opType.ToString());
        // Inputs have no InputOps metadata.
        metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
        Assert.Null(metadataType);

        // Conv kernel read: 5x5x1x32, an Identity op fed by "conv2d/kernel".
        Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D/ReadVariableOp", out col));
        type = schema.GetColumnType(col).AsVector;
        Assert.Equal(4, type.DimCount);
        Assert.Equal(5, type.GetDim(0));
        Assert.Equal(5, type.GetDim(1));
        Assert.Equal(1, type.GetDim(2));
        Assert.Equal(32, type.GetDim(3));
        metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
        Assert.NotNull(metadataType);
        Assert.True(metadataType.IsText);
        schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
        Assert.Equal("Identity", opType.ToString());
        metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
        Assert.NotNull(metadataType);
        VBuffer<ReadOnlyMemory<char>> inputOps = default;
        schema.GetMetadata(TensorFlowUtils.InputOps, col, ref inputOps);
        Assert.Equal(1, inputOps.Length);
        Assert.Equal("conv2d/kernel", inputOps.Values[0].ToString());

        // Conv output: 28x28x32, a Conv2D op with two upstream operators.
        Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D", out col));
        type = schema.GetColumnType(col).AsVector;
        Assert.Equal(3, type.DimCount);
        Assert.Equal(28, type.GetDim(0));
        Assert.Equal(28, type.GetDim(1));
        Assert.Equal(32, type.GetDim(2));
        metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
        Assert.NotNull(metadataType);
        Assert.True(metadataType.IsText);
        schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
        Assert.Equal("Conv2D", opType.ToString());
        metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
        Assert.NotNull(metadataType);
        schema.GetMetadata(TensorFlowUtils.InputOps, col, ref inputOps);
        Assert.Equal(2, inputOps.Length);
        Assert.Equal("reshape/Reshape", inputOps.Values[0].ToString());
        Assert.Equal("conv2d/Conv2D/ReadVariableOp", inputOps.Values[1].ToString());

        // Final Softmax output: 10 classes, one upstream operator.
        Assert.True(schema.TryGetColumnIndex("Softmax", out col));
        type = schema.GetColumnType(col).AsVector;
        Assert.Equal(1, type.DimCount);
        Assert.Equal(10, type.GetDim(0));
        metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
        Assert.NotNull(metadataType);
        Assert.True(metadataType.IsText);
        schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
        Assert.Equal("Softmax", opType.ToString());
        metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
        Assert.NotNull(metadataType);
        schema.GetMetadata(TensorFlowUtils.InputOps, col, ref inputOps);
        Assert.Equal(1, inputOps.Length);
        Assert.Equal("sequential/dense_1/BiasAdd", inputOps.Values[0].ToString());

        // Second model: every node is named with consecutive letters starting at 'a', each [2,2].
        model_location = "model_matmul/frozen_saved_model.pb";
        schema = TensorFlowUtils.GetModelSchema(env, model_location);
        char name = 'a';
        for (int i = 0; i < schema.ColumnCount; i++)
        {
            Assert.Equal(name.ToString(), schema.GetColumnName(i));
            type = schema.GetColumnType(i).AsVector;
            Assert.Equal(2, type.DimCount);
            Assert.Equal(2, type.GetDim(0));
            Assert.Equal(2, type.GetDim(1));
            name++;
        }
    }
}
/// <summary>
/// Convenience constructor for public facing API.
/// </summary>
/// <param name="env">Host Environment.</param>
/// <param name="input">Input <see cref="IDataView"/>. This is the output from previous transform or loader.</param>
/// <param name="model">Path to the TensorFlow model. </param>
/// <param name="names">Name of the output column(s). Keep it same as in the Tensorflow model.</param>
/// <param name="source">Name of the input column(s). Keep it same as in the Tensorflow model.</param>
public static IDataTransform Create(IHostEnvironment env, IDataView input, string model, string[] names, string[] source)
{
    var session = TensorFlowUtils.GetSession(env, model);
    // Only SavedModel directories need their path retained; frozen graphs are fully loaded into the session.
    var savedModelPath = TensorFlowUtils.IsSavedModel(env, model) ? model : null;
    var transform = new TensorFlowTransform(env, session, source, names, savedModelPath, false);
    return transform.MakeDataTransform(input);
}
/// <summary>
/// Creates a <see cref="TensorFlowEstimator"/> from a TensorFlow model on disk.
/// </summary>
/// <param name="env">Host environment.</param>
/// <param name="model">Path to the TensorFlow model (frozen graph file or SavedModel directory).</param>
/// <param name="inputs">Names of the input column(s); must match node names in the TensorFlow graph.</param>
/// <param name="outputs">Names of the output column(s); must match node names in the TensorFlow graph.</param>
// Delegates to the transform-based constructor. NOTE(review): the ternary suggests the model path is
// only retained for SavedModel directories (frozen graphs are loaded fully into the session) — confirm
// against the TensorFlowTransform constructor.
public TensorFlowEstimator(IHostEnvironment env, string model, string[] inputs, string[] outputs)
    : this(env, new TensorFlowTransform(env, TensorFlowUtils.GetSession(env, model), inputs, outputs, TensorFlowUtils.IsSavedModel(env, model) ? model : null, false))
{
}
// Factory method for SignatureDataTransform.
// Validates the arguments, opens a TensorFlow session over args.Model, and wraps the resulting
// transform around the given input data view.
private static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView input)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(args, nameof(args));
    env.CheckValue(input, nameof(input));
    env.CheckValue(args.InputColumns, nameof(args.InputColumns));
    env.CheckValue(args.OutputColumns, nameof(args.OutputColumns));

    var session = TensorFlowUtils.GetSession(env, args.Model);
    // Only SavedModel directories need their path retained.
    var savedModelPath = TensorFlowUtils.IsSavedModel(env, args.Model) ? args.Model : null;
    var transform = new TensorFlowTransform(env, session, args.InputColumns, args.OutputColumns, savedModelPath, false);
    return transform.MakeDataTransform(input);
}
/// <summary>
/// Get <see cref="DataViewSchema"/> for only those nodes which are marked "Placeholder" in the TensorFlow model,
/// i.e. the graph's inputs. Convenient for exploring the model input(s) when the TensorFlow graph is very large.
/// </summary>
public DataViewSchema GetInputSchema()
    => TensorFlowUtils.GetModelSchema(_env, Session.graph, TreatOutputAsBatched, "Placeholder");
/// <summary>
/// Load TensorFlow model into memory. This is the convenience method that allows the model to be loaded once and subsequently use it
/// for querying schema and creation of <see cref="TensorFlowEstimator"/> using <see cref="TensorFlowModel.ScoreTensorFlowModel(string, string, bool)"/>.
/// Usage of this API requires additional NuGet dependencies on TensorFlow redist, see linked document for more information.
/// <see cref="TensorFlowModel"/> also holds references to unmanaged resources that need to be freed either with an explicit
/// call to Dispose() or implicitly by declaring the variable with the "using" syntax.
///
/// <format type="text/markdown">
/// <![CDATA[
/// [!include[io](~/../docs/samples/docs/api-reference/tensorflow-usage.md)]
/// ]]>
/// </format>
/// </summary>
/// <param name="catalog">The transform's catalog.</param>
/// <param name="modelLocation">Location of the TensorFlow model.</param>
/// <param name="treatOutputAsBatched">If the first dimension of the output is unknown, should it be treated as batched or not.</param>
/// <example>
/// <format type="text/markdown">
/// <![CDATA[
/// [!code-csharp[LoadTensorFlowModel](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/TextClassification.cs)]
/// ]]>
/// </format>
/// </example>
public static TensorFlowModel LoadTensorFlowModel(this ModelOperationsCatalog catalog, string modelLocation, bool treatOutputAsBatched)
{
    var env = CatalogUtils.GetEnvironment(catalog);
    return TensorFlowUtils.LoadTensorFlowModel(env, modelLocation, treatOutputAsBatched);
}