public void RegressionNeuralNetModel_Save()
{
    // Train a small regression net on seeded random data, round-trip it through
    // Save/Load, and verify the reloaded model reproduces the expected error.
    var numberOfObservations = 500;
    var numberOfFeatures = 5;
    var random = new Random(32);

    var observations = new F64Matrix(numberOfObservations, numberOfFeatures);
    observations.Map(() => random.NextDouble());

    // NextDouble() already returns double — the original `(double)` cast was
    // redundant, and the unused lambda parameter is now a discard.
    var targets = Enumerable.Range(0, numberOfObservations)
        .Select(_ => random.NextDouble())
        .ToArray();

    var net = new NeuralNet();
    net.Add(new InputLayer(numberOfFeatures));
    net.Add(new DenseLayer(10));
    net.Add(new SquaredErrorRegressionLayer());

    // NOTE(review): AccuracyLoss is a classification metric; the loss here is
    // presumably only used for progress reporting — confirm SquareLoss wasn't intended.
    var learner = new RegressionNeuralNetLearner(net, new AccuracyLoss());
    var sut = learner.Learn(observations, targets);

    // save model.
    var writer = new StringWriter();
    sut.Save(() => writer);

    // load model and assert prediction results.
    sut = RegressionNeuralNetModel.Load(() => new StringReader(writer.ToString()));

    var predictions = sut.Predict(observations);
    var evaluator = new MeanSquaredErrorRegressionMetric();
    var actual = evaluator.Error(targets, predictions);

    Assert.AreEqual(0.0871547675024143, actual, 0.0001);
}
public void RegressionNeuralNetLearner_Learn_Early_Stopping()
{
    // Train with a separate held-out validation set so learning can stop early,
    // then pin the validation error to the expected baseline.
    var numberOfObservations = 500;
    var numberOfFeatures = 5;
    var random = new Random(32);

    CreateData(numberOfObservations, numberOfFeatures, random,
        out var observations, out var targets);
    CreateData(numberOfObservations, numberOfFeatures, random,
        out var validationObservations, out var validationTargets);

    var net = new NeuralNet();
    net.Add(new InputLayer(numberOfFeatures));
    net.Add(new DenseLayer(10));
    net.Add(new SquaredErrorRegressionLayer());

    var sut = new RegressionNeuralNetLearner(net, new SquareLoss(), 0.01, 150);
    var model = sut.Learn(observations, targets, validationObservations, validationTargets);

    var validationPredictions = model.Predict(validationObservations);
    var evaluator = new MeanSquaredErrorRegressionMetric();
    var actual = evaluator.Error(validationTargets, validationPredictions);

    Assert.AreEqual(0.093500629562319859, actual, 0.0001);
}
public void SharpLearning_Example()
{
    // Load data
    var (observations, targets) = DataSetUtilities.LoadWinequalityWhite();

    // Neural nets want bounded inputs; scale every feature into [0, 1] in place.
    var transform = new MinMaxTransformer(0.0, 1.0);
    transform.Transform(observations, observations);

    var featureCount = observations.ColumnCount;

    // Define the neural net: two 32-unit ReLU layers into a squared-error output.
    var net = new NeuralNet();
    net.Add(new InputLayer(inputUnits: featureCount));
    net.Add(new DenseLayer(32, Activation.Relu));
    net.Add(new DenseLayer(32, Activation.Relu));
    net.Add(new SquaredErrorRegressionLayer());

    // Only 10 iterations to keep the example fast; SquareLoss is just for
    // reporting progress.
    var learner = new RegressionNeuralNetLearner(net,
        iterations: 10,
        loss: new SquareLoss(),
        optimizerMethod: OptimizerMethod.Adam);

    // 10-fold cross validation with a fixed seed for reproducibility.
    var crossValidation = new RandomCrossValidation<double>(10, seed: 232);
    var predictions = crossValidation.CrossValidate(learner, observations, targets);

    Trace.WriteLine(FormatErrorString(targets, predictions));
}
public void RegressionNeuralNetLearner_Learn()
{
    // Fit a small regression net on seeded random data and pin the
    // resulting training error to the expected baseline.
    var numberOfObservations = 500;
    var numberOfFeatures = 5;
    var random = new Random(32);

    CreateData(numberOfObservations, numberOfFeatures, random,
        out var observations, out var targets);

    var net = new NeuralNet();
    net.Add(new InputLayer(numberOfFeatures));
    net.Add(new DenseLayer(10));
    net.Add(new SquaredErrorRegressionLayer());

    var sut = new RegressionNeuralNetLearner(net, new SquareLoss());
    var model = sut.Learn(observations, targets);

    var predictions = model.Predict(observations);
    var evaluator = new MeanSquaredErrorRegressionMetric();
    var actual = evaluator.Error(targets, predictions);

    Assert.AreEqual(0.0871547675024143, actual, 0.0001);
}
// Start is called before the first frame update
void Start()
{
    // Build a tiny 2-input regression network and a learner configured for
    // online training: one iteration at a time, one sample per batch.
    var network = new NeuralNet();
    network.Add(new InputLayer(2));
    network.Add(new DenseLayer(8));
    network.Add(new DenseLayer(8));
    network.Add(new SquaredErrorRegressionLayer());

    net = network;
    learner = new RegressionNeuralNetLearner(net, new SquareLoss(), iterations: 1, batchSize: 1);
}
public void RegressionNeuralNetLearner_Constructor_Throw_On_Wrong_OutputLayerType()
{
    // SvmLayer is a classification output layer, so the regression learner's
    // constructor is expected to reject this net (the test presumably carries
    // an expected-exception attribute — not visible in this chunk).
    var net = new NeuralNet();
    net.Add(new InputLayer(10));
    net.Add(new DenseLayer(10));
    net.Add(new SvmLayer(10));

    var sut = new RegressionNeuralNetLearner(net, new AccuracyLoss());
}
private ILearner<double> GetNeuralnet(int numberOfFeatures)
{
    // One ReLU hidden layer as wide as the input, trained with Adam.
    var network = new NeuralNet();
    network.Add(new InputLayer(inputUnits: numberOfFeatures));
    network.Add(new DenseLayer(numberOfFeatures, Activation.Relu));
    network.Add(new SquaredErrorRegressionLayer());

    return new RegressionNeuralNetLearner(
        network,
        learningRate: 0.001,
        iterations: 2000,
        loss: new SquareLoss(),
        batchSize: 180,
        optimizerMethod: OptimizerMethod.Adam);
}
public void FeatureNormalization_Normalize()
{
    // Use StreamReader(filepath) when running from filesystem
    var parser = new CsvParser(() => new StringReader(Resources.winequality_white));
    var targetName = "quality";

    // Feature matrix: every column except the target.
    var observations = parser.EnumerateRows(c => c != targetName)
        .ToF64Matrix();

    // Scale every feature into the [0.0, 1.0] range, in place.
    var minMaxTransformer = new MinMaxTransformer(0.0, 1.0);
    minMaxTransformer.Transform(observations, observations);

    var targets = parser.EnumerateRows(targetName)
        .ToF64Vector();

    // Minimal net: input straight into the squared-error output layer.
    var net = new NeuralNet();
    net.Add(new InputLayer(observations.ColumnCount));
    net.Add(new SquaredErrorRegressionLayer());

    var learner = new RegressionNeuralNetLearner(net, new SquareLoss());
    var model = learner.Learn(observations, targets);

    // Round-trip the transformer through XML so it can be shipped alongside
    // the model. Replace String{Writer,Reader} with Stream{Writer,Reader}
    // for use with the file system.
    var serializer = new GenericXmlDataContractSerializer();
    var data = new StringBuilder();
    var writer = new StringWriter(data);
    serializer.Serialize(minMaxTransformer, () => writer);

    var reader = new StringReader(data.ToString());
    var deserializedMinMaxTransform = serializer.Deserialize<MinMaxTransformer>(() => reader);

    // NOTE(review): observations were already normalized in place above, so this
    // transforms row 0 a second time — confirm this is the intended demonstration.
    var normalizedObservation = deserializedMinMaxTransform.Transform(observations.Row(0));
    var prediction = model.Predict(normalizedObservation);

    Trace.WriteLine($"Prediction: {prediction}");
}
public void Regression_Standard_Neural_Net_FeatureTransform_Normalization()
{
    #region Read Data
    // Use StreamReader(filepath) when running from filesystem
    var parser = new CsvParser(() => new StringReader(Resources.winequality_white));
    var targetName = "quality";

    // Feature matrix: every column except the target.
    var observations = parser.EnumerateRows(c => c != targetName)
        .ToF64Matrix();

    // Regression targets.
    var targets = parser.EnumerateRows(targetName)
        .ToF64Vector();
    #endregion

    // Scale features into [0, 1], then shift each feature to zero mean (in place).
    var minMaxTransformer = new MinMaxTransformer(0.0, 1.0);
    minMaxTransformer.Transform(observations, observations);

    var meanZeroTransformer = new MeanZeroFeatureTransformer();
    meanZeroTransformer.Transform(observations, observations);

    var numberOfFeatures = observations.ColumnCount;

    // Two 800-unit ReLU blocks with dropout for regularization.
    var net = new NeuralNet();
    net.Add(new InputLayer(inputUnits: numberOfFeatures));
    net.Add(new DropoutLayer(0.2));
    net.Add(new DenseLayer(800, Activation.Relu));
    net.Add(new DropoutLayer(0.5));
    net.Add(new DenseLayer(800, Activation.Relu));
    net.Add(new DropoutLayer(0.5));
    net.Add(new SquaredErrorRegressionLayer());

    // Only 10 iterations to keep the example fast; SquareLoss is only used
    // for reporting progress.
    var learner = new RegressionNeuralNetLearner(net, iterations: 10, loss: new SquareLoss());
    var model = learner.Learn(observations, targets);

    var metric = new MeanSquaredErrorRegressionMetric();
    var predictions = model.Predict(observations);

    Trace.WriteLine("Training Error: " + metric.Error(targets, predictions));
}
private ILearner<double> GetNeuralNet(int numberOfFeatures, int batchSize, int? iterations = null)
{
    // Single batch-normalized ReLU hidden layer sized to the feature count,
    // trained with RMSProp; iteration count falls back to the instance default.
    var network = new NeuralNet();
    network.Add(new InputLayer(inputUnits: numberOfFeatures));

    var hidden = new DenseLayer(numberOfFeatures, Activation.Relu)
    {
        BatchNormalization = true
    };
    network.Add(hidden);
    network.Add(new SquaredErrorRegressionLayer());

    return new RegressionNeuralNetLearner(
        network,
        learningRate: 0.01,
        iterations: iterations ?? _iterations,
        loss: new SquareLoss(),
        batchSize: batchSize,
        optimizerMethod: OptimizerMethod.RMSProp);
}
/// <summary>
/// Random Forest and Neural Network prediction: trains Random Forest,
/// Neural Network and AdaBoost regressors on the training CSV, evaluates
/// each on the test CSV, and stores the train/test errors in fields.
/// </summary>
public void RegressionLearner_Learn_And_Predict()
{
    #region Random Forest training
    var parser = new CsvParser(() => new StringReader(treinamento));
    var targetName = "T";

    var observations = parser.EnumerateRows(c => c != targetName)
        .ToF64Matrix();
    var targets = parser.EnumerateRows(targetName)
        .ToF64Vector();

    // Keep the most recent observation for later prediction.
    // NOTE(review): column index 1 is skipped here — confirm that is intentional.
    var lastRow = observations.RowCount - 1;
    UltimaObservacao = new double[]
    {
        observations[lastRow, 0],
        observations[lastRow, 2],
        observations[lastRow, 3],
        observations[lastRow, 4],
        observations[lastRow, 5],
        targets[targets.Length - 1] // Length instead of LINQ Count() on the array
    };

    var learner = new RegressionRandomForestLearner(trees: 500);
    model = learner.Learn(observations, targets);
    #endregion

    #region Random Forest evaluation
    parser = new CsvParser(() => new StringReader(teste));
    var observationsTeste = parser.EnumerateRows(c => c != targetName)
        .ToF64Matrix();
    var targetsTeste = parser.EnumerateRows(targetName)
        .ToF64Vector();

    // predict the training and test set.
    var trainPredictions = model.Predict(observations);
    var testPredictions = model.Predict(observationsTeste);

    // create the metric
    var metric = new MeanSquaredErrorRegressionMetric();

    // measure the error on training and test set.
    trainError = metric.Error(targets, trainPredictions);
    testError = metric.Error(targetsTeste, testPredictions);
    #endregion

    #region Neural Network training
    var net = new NeuralNet();
    net.Add(new InputLayer(6));
    net.Add(new DropoutLayer(0.2));
    net.Add(new DenseLayer(800, Activation.Relu));
    net.Add(new DropoutLayer(0.5));
    net.Add(new DenseLayer(800, Activation.Relu));
    net.Add(new DropoutLayer(0.5));
    net.Add(new SquaredErrorRegressionLayer());

    var learnernet = new RegressionNeuralNetLearner(net, iterations: 500, loss: new SquareLoss());
    modelnet = learnernet.Learn(observations, targets);
    #endregion

    #region Neural Network evaluation
    trainPredictions = modelnet.Predict(observations);
    testPredictions = modelnet.Predict(observationsTeste);
    trainErrorNet = metric.Error(targets, trainPredictions);
    testErrorNet = metric.Error(targetsTeste, testPredictions);
    #endregion

    #region AdaBoost training
    var learnerada = new RegressionAdaBoostLearner(maximumTreeDepth: 35, iterations: 2000, learningRate: 0.1);
    modelada = learnerada.Learn(observations, targets);
    #endregion

    #region AdaBoost evaluation
    trainPredictions = modelada.Predict(observations);
    testPredictions = modelada.Predict(observationsTeste);
    trainErrorAda = metric.Error(targets, trainPredictions);
    testErrorAda = metric.Error(targetsTeste, testPredictions);

    // string.Concat over a projection replaces the original O(n^2) `+=` loops
    // while producing byte-identical output (value + ";" per element).
    // NOTE(review): these locals are never read afterwards — possibly only
    // inspected in a debugger; confirm before removing.
    string stargets = string.Concat(targets.Select(v => v + ";"));
    string strainPredictions = string.Concat(trainPredictions.Select(v => v + ";"));
    string stargetsTeste = string.Concat(targetsTeste.Select(v => v + ";"));
    string stestPredictions = string.Concat(testPredictions.Select(v => v + ";"));
    #endregion
}
static void Main(string[] args)
{
    string[] races = { "Black", "Hispanic", "NativeAmerican", "White" };

    foreach (string race in races)
    {
        // The interpolated column name comes from the hard-coded array above,
        // so it is not user input; parameterize if races ever become external.
        var dataTable = Utilities.GetDataTable(
            $@"SELECT [StateId] ,[Year] ,[Subsidy] ,[Married] ,[AverageAge] ,[Male] ,[{race}] ,[NonRelative] ,[AverageMonths] FROM[AdoptionStatistics].[dbo].[ObservationsByStateYear]",
            connectionString);

        // do one hot encoding before we split the table up into training and test
        // otherwise we will have a problem if the StateId column has different number of distinct values,
        // which it actually does for 2016: data for Puerto Rico 2016 is not available
        dataTable = Utilities.OneHotEncoder(dataTable, new string[] { "StateId" });

        // get training data, all years except most recent, 2016
        var trainingRows = dataTable.Select("[Year] <> 2016");
        // get most recent year, 2016, for evaluating model
        var testRows = dataTable.Select("[Year] = 2016");

        (var observationsTraining, var targetsTraining) = Utilities.ObservationsFromDataRows(trainingRows, "AverageMonths");
        (var observationsTest, var targetsTest) = Utilities.ObservationsFromDataRows(testRows, "AverageMonths");

        // create neural net: four 1200-unit ReLU layers into a squared-error output.
        var adoptStatistics = new NeuralNet();
        adoptStatistics.Add(new InputLayer(inputUnits: dataTable.Columns.Count - 1));
        adoptStatistics.Add(new DenseLayer(1200, Activation.Relu));
        adoptStatistics.Add(new DenseLayer(1200, Activation.Relu));
        adoptStatistics.Add(new DenseLayer(1200, Activation.Relu));
        adoptStatistics.Add(new DenseLayer(1200, Activation.Relu));
        adoptStatistics.Add(new SquaredErrorRegressionLayer());

        // create learner, set iterations
        var learner = new RegressionNeuralNetLearner(adoptStatistics, iterations: 1000, loss: new SquareLoss());
        var model = learner.Learn(observationsTraining, targetsTraining);

        // CreateDirectory is a no-op when the directory already exists, replacing
        // the explicit DirectoryInfo.Exists check.
        var directory = Directory.CreateDirectory(outputDirectory);

        string modelName = $"adoptModel_{race}_{DateTime.Now:yy.MM.dd.HH.mm.ss}";
        // NOTE(review): assumes Save disposes the StreamWriter it is handed — confirm.
        model.Save(() => new StreamWriter(Path.Combine(directory.FullName, $"{modelName}.xml")));

        var metric = new MeanSquaredErrorRegressionMetric();

        var predictionsTraining = model.Predict(observationsTraining);
        var errorTraining = metric.Error(targetsTraining, predictionsTraining);
        Console.WriteLine($"Training Error for {modelName}: {errorTraining}");

        var predictionsTest = model.Predict(observationsTest);
        var errorTest = metric.Error(targetsTest, predictionsTest);
        Console.WriteLine($"Evaluation Error for {modelName}: {errorTest}");
    }
}