public void GenericXmlDataContractSerializer_Deserialize_Dont_PreserveObjectReferences()
{
    // Arrange: serializer configured WITHOUT object-reference preservation,
    // paired with the XML fixture produced under the same setting.
    var serializer = new GenericXmlDataContractSerializer(preserveObjectReferences: false);
    var source = new StringReader(m_serializationString_DontPreserveObjectReferences);

    // Act: deserialize the fixture back into a dictionary.
    var result = serializer.Deserialize<Dictionary<string, int>>(() => source);

    // Assert: round-trip yields the original key/value data.
    CollectionAssert.AreEqual(m_serializationData, result);
}
public void GenericXmlDataContractSerializer_Deserialize()
{
    // Arrange: default serializer settings, reading the default-settings XML fixture.
    var serializer = new GenericXmlDataContractSerializer();
    var source = new StringReader(m_serializationString);

    // Act: deserialize the fixture back into a dictionary.
    var result = serializer.Deserialize<Dictionary<string, int>>(() => source);

    // Assert: round-trip yields the original key/value data.
    CollectionAssert.AreEqual(m_serializationData, result);
}
public void GenericXmlDataContractSerializer_Serialize_Dont_PreserveObjectReferences()
{
    // Arrange: capture output in-memory; serializer configured WITHOUT
    // object-reference preservation.
    var output = new StringWriter();
    var serializer = new GenericXmlDataContractSerializer(preserveObjectReferences: false);

    // Act: serialize the dictionary fixture.
    serializer.Serialize(m_serializationData, () => output);

    // Assert: produced XML matches the expected no-references fixture exactly.
    Assert.AreEqual(m_serializationString_DontPreserveObjectReferences, output.ToString());
}
public void GenericXmlDataContractSerializer_Serialize()
{
    // Arrange: capture output in-memory; serializer with default settings.
    var output = new StringWriter();
    var serializer = new GenericXmlDataContractSerializer();

    // Act: serialize the dictionary fixture.
    serializer.Serialize(m_serializationData, () => output);

    // Assert: produced XML matches the expected default-settings fixture exactly.
    Assert.AreEqual(m_serializationString, output.ToString());
}
public void FeatureNormalization_Normalize()
{
    // Load the sample dataset from embedded resources.
    // Use StreamReader(filepath) when running from filesystem.
    var parser = new CsvParser(() => new StringReader(Resources.winequality_white));
    var targetName = "quality";

    // Feature matrix: every column except the target.
    var observations = parser.EnumerateRows(c => c != targetName)
        .ToF64Matrix();

    // Min-max normalizer scaling each feature into [0.0, 1.0];
    // applied in place over the observation matrix.
    var minMaxTransformer = new MinMaxTransformer(0.0, 1.0);
    minMaxTransformer.Transform(observations, observations);

    // Target vector for the regression problem.
    var targets = parser.EnumerateRows(targetName)
        .ToF64Vector();

    // Minimal neural net: input layer + squared-error regression output.
    var net = new NeuralNet();
    net.Add(new InputLayer(observations.ColumnCount));
    net.Add(new SquaredErrorRegressionLayer());

    // Train the regression model on the normalized features.
    var learner = new RegressionNeuralNetLearner(net, new SquareLoss());
    var model = learner.Learn(observations, targets);

    // Round-trip the transformer through XML so it can be shipped with the model.
    // Replace StringWriter/StringReader with Stream* variants for the file system.
    var serializer = new GenericXmlDataContractSerializer();
    var buffer = new StringBuilder();
    var sink = new StringWriter(buffer);
    serializer.Serialize(minMaxTransformer, () => sink);

    var source = new StringReader(buffer.ToString());
    var restoredTransform = serializer.Deserialize<MinMaxTransformer>(() => source);

    // Normalize a single observation with the restored transform and predict.
    // NOTE(review): observations was already transformed in place above, so
    // Row(0) is normalized twice here — confirm this matches the intended demo.
    var normalizedObservation = restoredTransform.Transform(observations.Row(0));
    var prediction = model.Predict(normalizedObservation);

    Trace.WriteLine($"Prediction: {prediction}");
}
public void RandomForest_Default_Parameters_Save_Load_Model_Using_Serializer()
{
    #region read and split data
    // Use StreamReader(filepath) when running from filesystem
    var parser = new CsvParser(() => new StringReader(Resources.winequality_white));
    var targetName = "quality";

    // read feature matrix
    var observations = parser.EnumerateRows(c => c != targetName)
        .ToF64Matrix();

    // read regression targets
    var targets = parser.EnumerateRows(targetName)
        .ToF64Vector();

    // creates training test splitter,
    // Since this is a regression problem, we use the random training/test set splitter.
    // 30 % of the data is used for the test set.
    var splitter = new RandomTrainingTestIndexSplitter<double>(trainingPercentage: 0.7, seed: 24);

    var trainingTestSplit = splitter.SplitSet(observations, targets);
    var trainSet = trainingTestSplit.TrainingSet;
    var testSet = trainingTestSplit.TestSet;
    #endregion

    // create learner with default parameters
    var learner = new RegressionRandomForestLearner(trees: 100);

    // learn model with found parameters
    var model = learner.Learn(trainSet.Observations, trainSet.Targets);

    // predict the training and test set.
    var trainPredictions = model.Predict(trainSet.Observations);
    var testPredictions = model.Predict(testSet.Observations);

    // since this is a regression problem we are using square error as metric
    // for evaluating how well the model performs.
    var metric = new MeanSquaredErrorRegressionMetric();

    // measure the error on training and test set.
    var trainError = metric.Error(trainSet.Targets, trainPredictions);
    var testError = metric.Error(testSet.Targets, testPredictions);

    TraceTrainingAndTestError(trainError, testError);

    //Save/load model as xml, in the file system use new StreamWriter(filePath);
    var xmlSerializer = new GenericXmlDataContractSerializer();

    var savedModelXml = new StringWriter();
    xmlSerializer
        .Serialize<IPredictorModel<double>>(model, () => savedModelXml);

    var loadedModelXml = xmlSerializer
        .Deserialize<IPredictorModel<double>>(() => new StringReader(savedModelXml.ToString()));

    // Fix: previously the loaded models were never exercised, so the test could
    // pass even if round-tripping silently corrupted the model. Verify the
    // restored model reproduces the original model's predictions exactly.
    CollectionAssert.AreEqual(testPredictions, loadedModelXml.Predict(testSet.Observations));

    //Save/load model as binary, in the file system use new StreamWriter(filePath);
    // NOTE(review): if GenericBinarySerializer is BinaryFormatter-based it is
    // unsafe for untrusted input (and removed in .NET 9) — confirm and prefer
    // the XML/DataContract path for persisted models.
    var binarySerializer = new GenericBinarySerializer();

    var savedModelBinary = new StringWriter();
    binarySerializer
        .Serialize<IPredictorModel<double>>(model, () => savedModelBinary);

    var loadedModelBinary = binarySerializer
        .Deserialize<IPredictorModel<double>>(() => new StringReader(savedModelBinary.ToString()));

    // Same round-trip verification for the binary path.
    CollectionAssert.AreEqual(testPredictions, loadedModelBinary.Predict(testSet.Observations));
}