/// <summary>
/// Evaluate the model on a set of feature rows.
/// </summary>
/// <param name="featureData">The data to evaluate the model on</param>
/// <param name="model">The model to use (defaults to trained model)</param>
/// <returns>The output of the model</returns>
public IList<IList<float>> Evaluate(float[][] featureData, CNTK.Function model = null)
{
    // fall back to the trained model when the caller did not supply one
    if (model == null)
    {
        model = this.Model;
    }

    // build one batch over all rows, honoring sequence mode when SequenceLength > 1
    var batch = (SequenceLength == 1)
        ? features.GetBatch(featureData, 0, featureData.Length)
        : features.GetSequenceBatch(SequenceLength, featureData, 0, featureData.Length);

    // set up the input/output maps and run a forward pass on the current device
    var inputMap = new Dictionary<CNTK.Variable, CNTK.Value>() { { features, batch } };
    var outputMap = new Dictionary<CNTK.Variable, CNTK.Value>() { { model.Output, null } };
    model.Evaluate(inputMap, outputMap, NetUtil.CurrentDevice);

    // unpack the dense output values for the model's output variable
    var outputValue = outputMap[model.Output];
    return outputValue.GetDenseData<float>(model.Output);
}
/// <summary> /// Evaluate on a collection of batches. /// </summary> /// <returns>The final value of the metric after evaluation.</returns> double EvaluateBatches() { // return if we have no evaluator if (evaluator == null) { return(0.0); } // loop through each batch of data var metric = 0.0; validationFeatures.Batch( BatchSize, (data, begin, end) => { // get current batch for testing var featureBatch = (SequenceLength == 1) ? features.GetBatch(validationFeatures, begin, end) : features.GetSequenceBatch(SequenceLength, validationFeatures, begin, end); var labelBatch = labels.GetBatch(validationLabels, begin, end); // return if we have no validation features if (validationFeatures.Length == 0) { return; } // test the network on the batch var minibatch_metric = evaluator.TestBatch( new[] { (features, featureBatch), (labels, labelBatch) });
public static void Main(string[] args) { Console.WriteLine("Loading data...."); // unzip archive if (!System.IO.File.Exists("x_train.bin")) { DataUtil.Unzip(@"..\..\..\..\..\boston_housing.zip", "."); } // load training and test data var training_data = DataUtil.LoadBinary <float>("x_train.bin", 404, 13); var test_data = DataUtil.LoadBinary <float>("x_test.bin", 102, 13); var training_labels = DataUtil.LoadBinary <float>("y_train.bin", 404); var test_labels = DataUtil.LoadBinary <float>("y_test.bin", 102); // report results Console.WriteLine($"{training_data.GetLength(0)} training houses loaded"); Console.WriteLine($"{test_data.GetLength(0)} test houses loaded"); // declare some variables var numFolds = 4; var maxEpochs = 50; var batchSize = 16; var batchCount = 0; // partition the training data using KFolds var lines = new List <List <double> >(); Console.WriteLine("Training the neural network...."); training_data.Index().Shuffle().KFold(numFolds, (foldIndex, trainingIndices, validationIndices) => { var line = new List <double>(); Console.WriteLine($"KFold partition {foldIndex + 1}/{numFolds}"); // create a new network var network = CreateNetwork(); // train the network during several epochs for (int epoch = 0; epoch < maxEpochs; epoch++) { // train the network using random batches var trainingError = 0.0; batchCount = 0; trainingIndices.Batch(batchSize, (indices, begin, end) => { // get the current batch var featureBatch = features.GetBatch(training_data, indices, begin, end); var labelBatch = labels.GetBatch(training_labels, indices, begin, end); // train the network on the batch var result = trainer.TrainBatch( new[] { (features, featureBatch), (labels, labelBatch) },