Example #1
        [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // TensorFlow is 64-bit only
        public void TensorFlowGettingSchemaMultipleTimes()
        {
            var modelLocation = "cifar_saved_model";
            var mlContext     = new MLContext(seed: 1, conc: 1);

            for (int i = 0; i < 10; i++)
            {
                var schema = TensorFlowUtils.GetModelSchema(mlContext, modelLocation);
                Assert.NotNull(schema);
            }
        }
Example #2
        [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // TensorFlow is 64-bit only
        public void TensorFlowTransformCifarSavedModel()
        {
            var modelLocation   = "cifar_saved_model";
            var mlContext       = new MLContext(seed: 1, conc: 1);
            var loadModelSchema = TensorFlowUtils.GetModelSchema(mlContext, modelLocation);

            Assert.Equal(335, loadModelSchema.Count);
            var tensorFlowModel = TensorFlowUtils.LoadTensorFlowModel(mlContext, modelLocation);
            var schema          = tensorFlowModel.GetInputSchema();

            Assert.True(schema.TryGetColumnIndex("Input", out int column));
            var type        = (VectorType)schema[column].Type;
            var imageHeight = type.Dimensions[0];
            var imageWidth  = type.Dimensions[1];

            var dataFile    = GetDataPath("images/images.tsv");
            var imageFolder = Path.GetDirectoryName(dataFile);
            var data        = mlContext.Data.ReadFromTextFile(dataFile, columns: new[]
            {
                new TextLoader.Column("ImagePath", DataKind.TX, 0),
                new TextLoader.Column("Name", DataKind.TX, 1),
            });
            var images  = new ImageLoaderTransformer(mlContext, imageFolder, ("ImagePath", "ImageReal")).Transform(data);
            var cropped = new ImageResizerTransformer(mlContext, "ImageReal", "ImageCropped", imageWidth, imageHeight).Transform(images);
            var pixels  = new ImagePixelExtractorTransformer(mlContext, "ImageCropped", "Input", interleave: true).Transform(cropped);
            IDataView trans = new TensorFlowTransformer(mlContext, tensorFlowModel, "Input", "Output").Transform(pixels);

            trans.Schema.TryGetColumnIndex("Output", out int output);
            using (var cursor = trans.GetRowCursor(col => col == output))
            {
                var buffer  = default(VBuffer<float>);
                var getter  = cursor.GetGetter<VBuffer<float>>(output);
                var numRows = 0;
                while (cursor.MoveNext())
                {
                    getter(ref buffer);
                    Assert.Equal(10, buffer.Length);
                    numRows += 1;
                }
                Assert.Equal(4, numRows);
            }
        }
Example #3
        [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // TensorFlow is 64-bit only
        public void TensorFlowInputsOutputsSchemaTest()
        {
            var mlContext      = new MLContext(seed: 1, conc: 1);
            var model_location = "mnist_model/frozen_saved_model.pb";
            var schema         = TensorFlowUtils.GetModelSchema(mlContext, model_location);

            Assert.Equal(86, schema.Count);
            Assert.True(schema.TryGetColumnIndex("Placeholder", out int col));
            var type = (VectorType)schema[col].Type;

            Assert.Equal(2, type.Dimensions.Length);
            Assert.Equal(28, type.Dimensions[0]);
            Assert.Equal(28, type.Dimensions[1]);
            var metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;

            Assert.NotNull(metadataType);
            Assert.True(metadataType is TextType);
            ReadOnlyMemory<char> opType = default;

            schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
            Assert.Equal("Placeholder", opType.ToString());
            metadataType = schema[col].Metadata.Schema.GetColumnOrNull(TensorFlowUtils.TensorflowUpstreamOperatorsKind)?.Type;
            Assert.Null(metadataType);

            Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D/ReadVariableOp", out col));
            type = (VectorType)schema[col].Type;
            Assert.Equal(new[] { 5, 5, 1, 32 }, type.Dimensions);
            metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;
            Assert.NotNull(metadataType);
            Assert.True(metadataType is TextType);
            schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
            Assert.Equal("Identity", opType.ToString());
            metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowUpstreamOperatorsKind].Type;
            Assert.NotNull(metadataType);
            VBuffer<ReadOnlyMemory<char>> inputOps = default;

            schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowUpstreamOperatorsKind, ref inputOps);
            Assert.Equal(1, inputOps.Length);
            Assert.Equal("conv2d/kernel", inputOps.GetValues()[0].ToString());

            Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D", out col));
            type = (VectorType)schema[col].Type;
            Assert.Equal(new[] { 28, 28, 32 }, type.Dimensions);
            metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;
            Assert.NotNull(metadataType);
            Assert.True(metadataType is TextType);
            schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
            Assert.Equal("Conv2D", opType.ToString());
            metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowUpstreamOperatorsKind].Type;
            Assert.NotNull(metadataType);
            schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowUpstreamOperatorsKind, ref inputOps);
            Assert.Equal(2, inputOps.Length);
            Assert.Equal("reshape/Reshape", inputOps.GetValues()[0].ToString());
            Assert.Equal("conv2d/Conv2D/ReadVariableOp", inputOps.GetValues()[1].ToString());

            Assert.True(schema.TryGetColumnIndex("Softmax", out col));
            type = (VectorType)schema[col].Type;
            Assert.Equal(new[] { 10 }, type.Dimensions);
            metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowOperatorTypeKind].Type;
            Assert.NotNull(metadataType);
            Assert.True(metadataType is TextType);
            schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
            Assert.Equal("Softmax", opType.ToString());
            metadataType = schema[col].Metadata.Schema[TensorFlowUtils.TensorflowUpstreamOperatorsKind].Type;
            Assert.NotNull(metadataType);
            schema[col].Metadata.GetValue(TensorFlowUtils.TensorflowUpstreamOperatorsKind, ref inputOps);
            Assert.Equal(1, inputOps.Length);
            Assert.Equal("sequential/dense_1/BiasAdd", inputOps.GetValues()[0].ToString());

            model_location = "model_matmul/frozen_saved_model.pb";
            schema         = TensorFlowUtils.GetModelSchema(mlContext, model_location);
            char name = 'a';

            for (int i = 0; i < schema.Count; i++)
            {
                Assert.Equal(name.ToString(), schema[i].Name);
                type = (VectorType)schema[i].Type;
                Assert.Equal(new[] { 2, 2 }, type.Dimensions);
                name++;
            }
        }
Example #4
 /// <summary>
 /// Get <see cref="DataViewSchema"/> for only those nodes which are marked "Placeholder" in the TensorFlow model.
 /// This method is convenient for exploring the model input(s) in case the TensorFlow graph is very large.
 /// </summary>
 public DataViewSchema GetInputSchema()
 {
     return(TensorFlowUtils.GetModelSchema(_env, Session.Graph, "Placeholder"));
 }
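
The following is a small usage sketch, not taken from the repository: assuming the cifar_saved_model test asset and the LoadTensorFlowModel/GetInputSchema calls shown in Examples #2 and #4, it enumerates the Placeholder columns and prints each input's name and tensor dimensions.

 // Usage sketch (not from the repository): assumes the cifar_saved_model test asset
 // and the ML.NET version whose API is shown in Examples #2 and #4.
 var mlContext       = new MLContext(seed: 1, conc: 1);
 var tensorFlowModel = TensorFlowUtils.LoadTensorFlowModel(mlContext, "cifar_saved_model");

 // GetInputSchema() returns one column per "Placeholder" node of the graph.
 var inputSchema = tensorFlowModel.GetInputSchema();
 foreach (var column in inputSchema)
 {
     if (column.Type is VectorType vectorType)
         Console.WriteLine($"{column.Name}: [{string.Join(", ", vectorType.Dimensions)}]");
     else
         Console.WriteLine($"{column.Name}: {column.Type}");
 }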
Example #5
 /// <summary>
 /// Get <see cref="DataViewSchema"/> for complete model. Every node in the TensorFlow model will be included in the <see cref="DataViewSchema"/> object.
 /// </summary>
 public DataViewSchema GetModelSchema()
 {
     return TensorFlowUtils.GetModelSchema(_env, Session.Graph);
 }
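
As a companion sketch (again not from the repository, reusing the mnist_model asset and the metadata kinds from Example #3), GetModelSchema() can be contrasted with GetInputSchema() and used to read the operator-type metadata of any node:

 // Usage sketch (not from the repository): assumes the mnist_model test asset and the
 // API surface shown in Examples #3 and #5.
 var mlContext = new MLContext(seed: 1, conc: 1);
 var model     = TensorFlowUtils.LoadTensorFlowModel(mlContext, "mnist_model/frozen_saved_model.pb");

 // GetModelSchema() covers every node in the graph; GetInputSchema() only the Placeholders.
 var fullSchema  = model.GetModelSchema();
 var inputSchema = model.GetInputSchema();
 Console.WriteLine($"{fullSchema.Count} nodes total, {inputSchema.Count} Placeholder input(s).");

 // Each column also carries the TensorFlow operator type as metadata (see Example #3).
 if (fullSchema.TryGetColumnIndex("Softmax", out int col))
 {
     ReadOnlyMemory<char> opType = default;
     fullSchema[col].Metadata.GetValue(TensorFlowUtils.TensorflowOperatorTypeKind, ref opType);
     Console.WriteLine($"Softmax operator type: {opType}");
 }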
Example #6
 /// <summary>
 /// Get <see cref="Schema"/> for complete model. Every node in the TensorFlow model will be included in the <see cref="Schema"/> object.
 /// </summary>
 internal Schema GetModelSchema()
 {
     return TensorFlowUtils.GetModelSchema(_env, Session.Graph);
 }
Example #7
        public void TensorFlowInputsOutputsSchemaTest()
        {
            using (var env = new ConsoleEnvironment(seed: 1, conc: 1))
            {
                var model_location = "mnist_model/frozen_saved_model.pb";
                var schema         = TensorFlowUtils.GetModelSchema(env, model_location);
                Assert.Equal(54, schema.ColumnCount);
                Assert.True(schema.TryGetColumnIndex("Placeholder", out int col));
                var type = schema.GetColumnType(col).AsVector;
                Assert.Equal(2, type.DimCount);
                Assert.Equal(28, type.GetDim(0));
                Assert.Equal(28, type.GetDim(1));
                var metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
                Assert.NotNull(metadataType);
                Assert.True(metadataType.IsText);
                ReadOnlyMemory<char> opType = default;
                schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
                Assert.Equal("Placeholder", opType.ToString());
                metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
                Assert.Null(metadataType);

                Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D/ReadVariableOp", out col));
                type = schema.GetColumnType(col).AsVector;
                Assert.Equal(4, type.DimCount);
                Assert.Equal(5, type.GetDim(0));
                Assert.Equal(5, type.GetDim(1));
                Assert.Equal(1, type.GetDim(2));
                Assert.Equal(32, type.GetDim(3));
                metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
                Assert.NotNull(metadataType);
                Assert.True(metadataType.IsText);
                schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
                Assert.Equal("Identity", opType.ToString());
                metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
                Assert.NotNull(metadataType);
                VBuffer<ReadOnlyMemory<char>> inputOps = default;
                schema.GetMetadata(TensorFlowUtils.InputOps, col, ref inputOps);
                Assert.Equal(1, inputOps.Length);
                Assert.Equal("conv2d/kernel", inputOps.Values[0].ToString());

                Assert.True(schema.TryGetColumnIndex("conv2d/Conv2D", out col));
                type = schema.GetColumnType(col).AsVector;
                Assert.Equal(3, type.DimCount);
                Assert.Equal(28, type.GetDim(0));
                Assert.Equal(28, type.GetDim(1));
                Assert.Equal(32, type.GetDim(2));
                metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
                Assert.NotNull(metadataType);
                Assert.True(metadataType.IsText);
                schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
                Assert.Equal("Conv2D", opType.ToString());
                metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
                Assert.NotNull(metadataType);
                schema.GetMetadata(TensorFlowUtils.InputOps, col, ref inputOps);
                Assert.Equal(2, inputOps.Length);
                Assert.Equal("reshape/Reshape", inputOps.Values[0].ToString());
                Assert.Equal("conv2d/Conv2D/ReadVariableOp", inputOps.Values[1].ToString());

                Assert.True(schema.TryGetColumnIndex("Softmax", out col));
                type = schema.GetColumnType(col).AsVector;
                Assert.Equal(1, type.DimCount);
                Assert.Equal(10, type.GetDim(0));
                metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.OpType, col);
                Assert.NotNull(metadataType);
                Assert.True(metadataType.IsText);
                schema.GetMetadata(TensorFlowUtils.OpType, col, ref opType);
                Assert.Equal("Softmax", opType.ToString());
                metadataType = schema.GetMetadataTypeOrNull(TensorFlowUtils.InputOps, col);
                Assert.NotNull(metadataType);
                schema.GetMetadata(TensorFlowUtils.InputOps, col, ref inputOps);
                Assert.Equal(1, inputOps.Length);
                Assert.Equal("sequential/dense_1/BiasAdd", inputOps.Values[0].ToString());

                model_location = "model_matmul/frozen_saved_model.pb";
                schema         = TensorFlowUtils.GetModelSchema(env, model_location);
                char name = 'a';
                for (int i = 0; i < schema.ColumnCount; i++)
                {
                    Assert.Equal(name.ToString(), schema.GetColumnName(i));
                    type = schema.GetColumnType(i).AsVector;
                    Assert.Equal(2, type.DimCount);
                    Assert.Equal(2, type.GetDim(0));
                    Assert.Equal(2, type.GetDim(1));
                    name++;
                }
            }
        }
Example #8
 /// <summary>
 /// Get <see cref="DataViewSchema"/> for only those nodes which are marked "Placeholder" in the TensorFlow model.
 /// This method is convenient for exploring the model input(s) in case the TensorFlow graph is very large.
 /// </summary>
 public DataViewSchema GetInputSchema()
 {
     return(TensorFlowUtils.GetModelSchema(_env, Session.graph, TreatOutputAsBatched, "Placeholder"));
 }