public void TrainModelTest(string trainFile, int chunkSize, int numOfChunk)
{
    // Trains a model on up to numOfChunk chunks read from a classified dataset,
    // then verifies that one non-null weight vector exists per class.
    var model = new TrainModel(numOfChunk, 2, 64, 0.1);
    var dataset = new ClassifiedDataset(trainFile, 64, 2);

    int chunksTrained = 0;
    foreach (var chunk in dataset.GetClassifiedVectorsInChunks(chunkSize))
    {
        model.Train(chunk, chunksTrained);
        chunksTrained++;
        if (chunksTrained == numOfChunk)
        {
            break;
        }
    }

    // The model is configured for 2 classes, so exactly 2 weight vectors are expected.
    var weights = model.GetWeights();
    Assert.That(weights.Length, Is.EqualTo(2));
    foreach (var weightVector in weights)
    {
        Assert.IsNotNull(weightVector);
    }
}
public void TrainModelAndTestModelTest(string trainFile, string testFile, int chunkSize, int numOfChunk)
{
    // Trains a model on the classified training set, then runs the resulting
    // weights against the unclassified test set and checks a result is produced.
    var trainModel = new TrainModel(numOfChunk, 2, 64, 0.1);
    var trainDataset = new ClassifiedDataset(trainFile, 64, 2);

    int count = 0;
    foreach (var chunk in trainDataset.GetClassifiedVectorsInChunks(chunkSize))
    {
        trainModel.Train(chunk, count);
        if (++count == numOfChunk)
        {
            break;
        }
    }

    // Construct the test model AFTER training so it is guaranteed to receive the
    // trained weights. (The original built it before training; if GetWeights()
    // returns a snapshot rather than a live reference, the test model would have
    // evaluated untrained weights.)
    var testModel = new TestModel(numOfChunk, 2, 0.1, trainModel.GetWeights());
    var testDataset = new UnclassifiedDataset(testFile, 64);

    var results = testModel.Test(testDataset.GetDataVectors().Select(v => v.FeatureVector).ToArray(), 0);

    // The original test asserted nothing, so it could never fail on bad output.
    Assert.IsNotNull(results);
}
public void GetClassifiedVectorsInChunksTest(string filePath, int chunkSize, int expectedNumOfChunk, int expectedLastChunkSize)
{
    // Verifies that chunked iteration over a classified dataset yields the expected
    // number of chunks, that each chunk carries 2 class groups, and that every chunk
    // is full-size except (possibly) the last.
    ClassifiedDataset dataset = new ClassifiedDataset(filePath, 64, 2);

    int actualNumOfChunk = 0;
    foreach (var chunk in dataset.GetClassifiedVectorsInChunks(chunkSize))
    {
        actualNumOfChunk++;
        Assert.That(chunk.Length, Is.EqualTo(2));
        if (actualNumOfChunk == expectedNumOfChunk)
        {
            // The final chunk may be smaller than chunkSize.
            Assert.That(chunk.Sum(c => c.Count), Is.EqualTo(expectedLastChunkSize));
        }
        else
        {
            Assert.That(chunk.Sum(c => c.Count), Is.EqualTo(chunkSize));
        }
    }

    // Fixed: actual value goes first, expected inside Is.EqualTo — the original had
    // them swapped, which produces misleading "expected X but was Y" failure messages.
    Assert.That(actualNumOfChunk, Is.EqualTo(expectedNumOfChunk));
}