// Round-trips a fitted TensorFlow transform through the legacy TrainUtils
// save path and verifies the reloaded transform still behaves correctly.
void TestOldSavingAndLoading()
{
    var modelFile = "model_matmul/frozen_saved_model.pb";

    // Three rows of paired 4-element float vectors fed to the matmul model.
    var samples = new TestData[]
    {
        new TestData() { a = new[] { 1.0f, 2.0f, 3.0f, 4.0f }, b = new[] { 1.0f, 2.0f, 3.0f, 4.0f } },
        new TestData() { a = new[] { 2.0f, 2.0f, 2.0f, 2.0f }, b = new[] { 3.0f, 3.0f, 3.0f, 3.0f } },
        new TestData() { a = new[] { 5.0f, 6.0f, 10.0f, 12.0f }, b = new[] { 10.0f, 8.0f, 6.0f, 6.0f } }
    };
    var dataView = ComponentCreation.CreateDataView(Env, new List<TestData>(samples));

    // Fit the estimator, transform the data, and wrap the result for saving.
    var estimator = new TensorFlowEstimator(Env, modelFile, new[] { "a", "b" }, new[] { "c" });
    var fittedTransformer = estimator.Fit(dataView);
    var transformed = fittedTransformer.Transform(dataView);
    var transformedRoles = new RoleMappedData(transformed);

    // Save via the old model-file path, rewind, reload, and validate.
    using (var stream = new MemoryStream())
    {
        TrainUtils.SaveModel(Env, Env.Start("saving"), stream, null, transformedRoles);
        stream.Position = 0;
        var reloadedView = ModelFileUtils.LoadTransforms(Env, dataView, stream);
        ValidateTensorFlowTransformer(reloadedView);
    }
}
// Verifies schema validation for the TensorFlow estimator: valid data fits
// cleanly, while wrong column names, wrong column types, and wrong vector
// sizes are rejected at the appropriate stage.
void TestSimpleCase()
{
    var modelFile = "model_matmul/frozen_saved_model.pb";

    // Valid input: paired 4-element float vectors matching the model's inputs.
    var dataView = ComponentCreation.CreateDataView(Env, new List<TestData>(new TestData[]
    {
        new TestData() { a = new[] { 1.0f, 2.0f, 3.0f, 4.0f }, b = new[] { 1.0f, 2.0f, 3.0f, 4.0f } },
        new TestData() { a = new[] { 2.0f, 2.0f, 2.0f, 2.0f }, b = new[] { 3.0f, 3.0f, 3.0f, 3.0f } }
    }));

    // Invalid variants: wrong column names, wrong element type, wrong vector length.
    var xyData = new List<TestDataXY> { new TestDataXY() { A = new float[4], B = new float[4] } };
    var stringData = new List<TestDataDifferntType> { new TestDataDifferntType() { a = new string[4], b = new string[4] } };
    var sizeData = new List<TestDataSize> { new TestDataSize() { a = new float[2], b = new float[2] } };

    var pipe = new TensorFlowEstimator(Env, modelFile, new[] { "a", "b" }, new[] { "c" });
    var invalidDataWrongNames = ComponentCreation.CreateDataView(Env, xyData);
    var invalidDataWrongTypes = ComponentCreation.CreateDataView(Env, stringData);
    var invalidDataWrongVectorSize = ComponentCreation.CreateDataView(Env, sizeData);

    TestEstimatorCore(pipe, dataView, invalidInput: invalidDataWrongNames);
    TestEstimatorCore(pipe, dataView, invalidInput: invalidDataWrongTypes);

    // A wrong vector size passes the static schema check (length is not part of
    // SchemaShape) but must fail when Fit inspects the actual column dimensions.
    pipe.GetOutputSchema(SchemaShape.Create(invalidDataWrongVectorSize.Schema));

    // Previously: try { Fit(...); Assert.False(true); } catch (...) { } — a sentinel
    // assert with no message and silently-swallowed exceptions. Capture the exception
    // explicitly so a failure reports what actually happened.
    var thrown = Record.Exception(() => pipe.Fit(invalidDataWrongVectorSize));
    Assert.True(thrown is ArgumentOutOfRangeException || thrown is InvalidOperationException,
        $"Fit with a wrong vector size should throw ArgumentOutOfRangeException or InvalidOperationException, " +
        $"but got: {thrown?.GetType().Name ?? "no exception"}");
}
// Entry point: asks for an image directory, builds an image -> TensorFlow
// classification pipeline, and prints an Infected/Healthy verdict for up to
// 100 PNG files found in that directory.
static void Main(string[] args)
{
    Console.Write("Image directory: ");
    var imagePath = Console.ReadLine();

    // Fail fast with a clear message instead of throwing deep inside the
    // pipeline when the user enters a missing or empty path.
    if (string.IsNullOrWhiteSpace(imagePath) || !Directory.Exists(imagePath))
    {
        Console.WriteLine($"Directory not found: {imagePath}");
        return;
    }

    var context = new MLContext();

    // An empty data view is necessary only so Fit() can be called later;
    // none of the estimators here are actually trained on data.
    var fakeData = context.Data.ReadFromEnumerable(new List<InputModel>());

    // loads images from disk
    var imageLoadingEstimator = new ImageLoadingEstimator(
        context, imagePath, ("ImageData", nameof(InputModel.Path)));

    // resizes images to the 64x64 size the model expects
    var imageResizingEstimator = new ImageResizingEstimator(
        context, "ImageResized", 64, 64, "ImageData",
        ImageResizerTransformer.ResizingKind.IsoPad);

    // transforms images to interleaved RGB float vectors scaled to [0, 1]
    var imagePixelExtractingEstimator = new ImagePixelExtractingEstimator(context,
        new ImagePixelExtractorTransformer.ColumnInfo(
            "input_1", "ImageResized",
            ImagePixelExtractorTransformer.ColorBits.Rgb,
            interleave: true, scale: 1 / 255f));

    // loads the TF model from disk
    var tensorFlowEstimator = new TensorFlowEstimator(
        context, new[] { "final_layer/Sigmoid" }, new[] { "input_1" },
        "Model/MalariaModel.pb");

    // create a ML pipeline
    var pipeline = imageLoadingEstimator
        .Append(imageResizingEstimator)
        .Append(imagePixelExtractingEstimator)
        .Append(tensorFlowEstimator);

    // fit, otherwise we can't create a prediction engine
    var model = pipeline.Fit(fakeData);
    var predictor = model.CreatePredictionEngine<InputModel, OutputModel>(context);

    // Was Directory.GetFiles(...).Take(100).ToList(): the ToList() materialized
    // the sequence for no reason — foreach enumerates lazily just fine.
    foreach (var file in Directory.GetFiles(imagePath, "*.png").Take(100))
    {
        var newName = ImageUtilities.ResizeImage(file, 64, 64, imagePath);
        var prediction = predictor.Predict(new InputModel { Path = newName });
        Console.WriteLine($"{Path.GetFileName(file)}: {(prediction.IsInfected() ? "Infected" : "Healthy")}");
    }

    Console.ReadLine();
}