/// <summary>
/// Renders the classified data points of <paramref name="x"/> onto a freshly
/// created white canvas and returns that canvas.
/// </summary>
/// <param name="network">Trained network used to classify the points.</param>
/// <param name="opts">Network options forwarded to classification.</param>
/// <param name="threshold">Classification decision threshold.</param>
/// <param name="x">Input samples (one row per sample).</param>
/// <param name="width">Canvas width in pixels.</param>
/// <param name="height">Canvas height in pixels.</param>
/// <param name="f1">Index of the feature plotted on the x axis.</param>
/// <param name="f2">Index of the feature plotted on the y axis.</param>
/// <returns>A new Pixbuf containing the plot.</returns>
public static Pixbuf Points(ManagedDNN network, NeuralNetworkOptions opts, double threshold, ManagedArray x, int width, int height, int f1 = 0, int f2 = 0)
{
    // Start from a blank white canvas, then delegate to the in-place overload.
    var canvas = Common.Pixbuf(width, height, new Color(255, 255, 255));

    Points(canvas, network, opts, threshold, x, width, height, f1, f2);

    return canvas;
}
/// <summary>
/// Plots the classified data points of <paramref name="x"/> onto an existing
/// canvas and frames the result with a gray border. As a side effect it
/// refreshes the shared plot extents (minx/maxx/miny/maxy and deltax/deltay)
/// used by the other plotting routines.
/// </summary>
/// <param name="pixbuf">Canvas to draw on.</param>
/// <param name="network">Trained network used to classify the points.</param>
/// <param name="opts">Network options; its Items count is set to the row count.</param>
/// <param name="threshold">Classification decision threshold.</param>
/// <param name="x">Input samples (one row per sample).</param>
/// <param name="width">Plot width used for extent scaling.</param>
/// <param name="height">Plot height used for extent scaling.</param>
/// <param name="f1">Feature index for the x axis (reset to 0 when out of range).</param>
/// <param name="f2">Feature index for the y axis (reset to 0 when out of range).</param>
public static void Points(Pixbuf pixbuf, ManagedDNN network, NeuralNetworkOptions opts, double threshold, ManagedArray x, int width, int height, int f1 = 0, int f2 = 0)
{
    var rows = Rows(x);

    // Fall back to feature 0 when an axis index is out of range.
    if (f1 < 0 || f1 >= Cols(x))
    {
        f1 = 0;
    }

    if (f2 < 0 || f2 >= Cols(x))
    {
        f2 = 0;
    }

    // Scan the data once to find the raw extents of both plotted features.
    minx = Double.MaxValue;
    maxx = Double.MinValue;
    miny = Double.MaxValue;
    maxy = Double.MinValue;

    for (var row = 0; row < rows; row++)
    {
        minx = Math.Min(x[f1, row], minx);
        maxx = Math.Max(x[f1, row], maxx);
        miny = Math.Min(x[f2, row], miny);
        maxy = Math.Max(x[f2, row], maxy);
    }

    // Pad the extents by eight pixels worth of data on every side,
    // then recompute the per-pixel step sizes.
    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    minx -= 8 * deltax;
    maxx += 8 * deltax;
    miny -= 8 * deltay;
    maxy += 8 * deltay;

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    // Randomized palette so classes get distinct colors each run.
    var palette = Common.Palette2();
    palette.Shuffle();

    var plotOptions = opts;
    plotOptions.Items = Rows(x);

    var classification = network.Classify(x, plotOptions, threshold);

    Points(pixbuf, x, classification, palette, f1, f2);

    // Frame the plot.
    // NOTE(review): right edge uses Width - 1 but bottom uses Height (not
    // Height - 1) — presumably Common.Line clamps out-of-range pixels; confirm.
    var right = pixbuf.Width - 1;
    var bottom = pixbuf.Height;
    var gray = new Color(128, 128, 128);

    Common.Line(pixbuf, 0, 1, right, 1, gray);
    Common.Line(pixbuf, right, 1, right, bottom, gray);
    Common.Line(pixbuf, 0, bottom, right, bottom, gray);
    Common.Line(pixbuf, 0, 1, 0, bottom, gray);

    ManagedOps.Free(classification);
}
/// <summary>
/// Constructs the viewer with a preconfigured hyperbolic-tangent network
/// (5 inputs, hidden layers {3, 4, 5, 7, 8, 20, 4}, 2 outputs) before
/// initializing the UI components.
/// </summary>
public NNViewer()
{
    var options = new NeuralNetworkOptions
    {
        InputLayerSize = 5,
        OutputLayerSize = 2,
        HiddenLayerSizes = new int[] { 3, 4, 5, 7, 8, 20, 4 },
        ActivationFunction = ActivationFunction.HyperbolicTangent
    };

    network = new NeuralNetwork(options);

    InitializeComponent();
}
/// <summary>
/// Draws a decision-contour plot for <paramref name="network"/> over the
/// extents of the data in <paramref name="x"/>: the network is evaluated on a
/// width-by-height grid, contour lines are traced at levels {0.6, 0.8, 1},
/// and the raw data points are overlaid on top. Updates the shared plot
/// extents (minx/maxx/miny/maxy, deltax/deltay) as a side effect and draws
/// into the shared ContourGraph canvas.
/// </summary>
/// <param name="network">Trained network whose predictions are contoured.</param>
/// <param name="opts">Network options forwarded to Predict/Classify.</param>
/// <param name="threshold">Classification threshold for the overlaid points.</param>
/// <param name="x">Input samples (one row per sample).</param>
/// <param name="width">Grid/canvas width in pixels.</param>
/// <param name="height">Grid/canvas height in pixels.</param>
/// <param name="f1">Feature index for the x axis (reset to 0 when out of range).</param>
/// <param name="f2">Feature index for the y axis (reset to 0 when out of range).</param>
/// <returns>The shared ContourGraph canvas containing the plot.</returns>
public static Pixbuf Contour(ManagedDNN network, NeuralNetworkOptions opts, double threshold, ManagedArray x, int width, int height, int f1 = 0, int f2 = 0)
{
    InitializeContour(11, width, height);

    var m = Rows(x);

    var xplot = new double[width];
    var yplot = new double[height];

    // NOTE(review): declared as [height, width] but written below as
    // data[i, j] with i < width and j < height — this only stays in bounds
    // when width == height. Confirm the intended index order against
    // Conrec.Contour's expected row/column convention.
    var data = new double[height, width];

    // Scan the data once to find the raw extents of both plotted features.
    minx = double.MaxValue; maxx = double.MinValue; miny = double.MaxValue; maxy = double.MinValue;

    // Fall back to feature 0 when an axis index is out of range.
    f1 = f1 >= 0 && f1 < Cols(x) ? f1 : 0;
    f2 = f2 >= 0 && f2 < Cols(x) ? f2 : 0;

    for (var j = 0; j < m; j++)
    {
        minx = Math.Min(x[f1, j], minx);
        maxx = Math.Max(x[f1, j], maxx);
        miny = Math.Min(x[f2, j], miny);
        maxy = Math.Max(x[f2, j], maxy);
    }

    // Pad the extents by eight pixels worth of data on every side,
    // then recompute the per-pixel step sizes.
    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    minx = minx - 8 * deltax;
    maxx = maxx + 8 * deltax;
    miny = miny - 8 * deltay;
    maxy = maxy + 8 * deltay;

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    // For predict: grid coordinates along each axis.
    for (var i = 0; i < width; i++) { xplot[i] = minx + i * deltax; }

    for (var i = 0; i < height; i++) { yplot[i] = miny + i * deltay; }

    // One column of grid points at a time is fed through the network.
    // NOTE(review): allocated with 2 columns, but indexed with f1/f2 which are
    // clamped to Cols(x), not to 2 — fine only when f1, f2 < 2; confirm.
    var xx = new ManagedArray(2, height);

    for (var i = 0; i < width; i++)
    {
        // Fix the x coordinate for this column, vary y down the column.
        for (var j = 0; j < height; j++)
        {
            xx[f1, j] = xplot[i];
            xx[f2, j] = yplot[j];
        }

        var p = network.Predict(xx, opts);

        for (var j = 0; j < height; j++)
        {
            data[i, j] = p[j];
        }

        ManagedOps.Free(p);
    }

    // Contour levels to trace.
    var z = new double[] { 0.6, 0.8, 1 };

    Conrec.Contour(data, xplot, yplot, z, ContourLine);

    // Overlay the raw data points on top of the contour lines.
    Points(ContourGraph, network, opts, threshold, x, width, height, f1, f2);

    ManagedOps.Free(xx);

    var border = new Color(128, 128, 128);

    // Plot bounding box
    // NOTE(review): bottom edge uses Height (not Height - 1), matching the
    // Points overload — presumably Common.Line clamps; confirm.
    var cw = ContourGraph.Width - 1;
    var ch = ContourGraph.Height;

    Common.Line(ContourGraph, 0, 1, cw, 1, border);
    Common.Line(ContourGraph, cw, 1, cw, ch, border);
    Common.Line(ContourGraph, 0, ch, cw, ch, border);
    Common.Line(ContourGraph, 0, 1, 0, ch, border);

    return(ContourGraph);
}
/// <summary>
/// Trains a fresh network (or encoder, per the options) on the MNIST data
/// referenced by <paramref name="options"/>. The training, test and
/// validation sets are cached in static fields and reloaded only when their
/// paths change. The options used are dumped to "neural_network.log".
/// </summary>
/// <param name="options">Full training configuration.</param>
/// <returns>The result produced by the network's Train call.</returns>
public static TrainingResult TrainOnMnist(NeuralNetworkOptions options)
{
    var verbose = options.IsVerbose;
    var normalizeInput = options.NormalizeInput;
    var encoderMode = options.IsEncoder;

    #region dump used params
    //lel
    var serializedOptions = JsonConvert.SerializeObject(options, Formatting.Indented);
    File.WriteAllText("neural_network.log", serializedOptions);
    #endregion

    // Encoder mode swaps in the autoencoder variant of the network.
    NeuralNetwork mlp = encoderMode
        ? new Network.Encoder(options.ActivationFunction, options.InitialWeightsRange, options.Sizes)
        : new NeuralNetwork(options.ActivationFunction, options.InitialWeightsRange, options.Sizes);

    // Reload each dataset only when its path changed or it was never loaded.
    if (_trainingSetPath != options.TrainingPath || _trainingSet == null)
    {
        _trainingSet = MnistParser.ReadAll(options.TrainingPath, normalizeInput, encoderMode);
        _trainingSetPath = options.TrainingPath;
    }

    var trainingSet = _trainingSet;

    if (_testSetPath != options.TestPath || _testSet == null)
    {
        _testSet = MnistParser.ReadAll(options.TestPath, normalizeInput, encoderMode);
        _testSetPath = options.TestPath;
    }

    var testSet = _testSet;

    if (_validationSetPath != options.ValidationPath || _validationSet == null)
    {
        _validationSet = MnistParser.ReadAll(options.ValidationPath, normalizeInput, encoderMode);
        _validationSetPath = options.ValidationPath;
    }

    var validationSet = _validationSet;

    var trainingModel = new TrainingModel
    {
        MaxEpochs = options.MaxEpochs,
        ErrorThreshold = options.ErrorThreshold,
        ValidationSet = validationSet,
        TrainingSet = trainingSet,
        TestSet = testSet,
        IsVerbose = verbose,
        BatchSize = options.BatchSize,
        LearningRate = options.LearningRate,
        Momentum = options.Momentum,
        EvaluateOnEachEpoch = options.LogData,
        IsEncoder = options.IsEncoder,
        Lambda = options.Lambda,
        TakeBest = options.TakeBest
    };

    return mlp.Train(trainingModel);
}
/// <summary>
/// Gets a new instance of <see cref="RegressionNeuralNetwork{TInputDataModel, TOutputDataModel}"/>
/// by forcing the options to the regression task and delegating to the generic builder.
/// </summary>
/// <typeparam name="TInputDataModel">Input data model consumed by the network.</typeparam>
/// <typeparam name="TOutputDataModel">Output data model produced by the network.</typeparam>
/// <param name="options">Build options; its Task is overwritten with Regression.</param>
/// <returns>The built network cast to a regression network (null if the cast fails).</returns>
public async ValueTask <RegressionNeuralNetwork <TInputDataModel, TOutputDataModel> > BuildRegressionNeuralNetwork <TInputDataModel, TOutputDataModel>(NeuralNetworkOptions options)
{
    // Force options task to be regression.
    // (The original comment said "classification" — a copy/paste slip from the
    // sibling builder; the code has always set Regression.)
    options.Task = NeuralNetworkOptions.TrainingTask.Regression;

    return(await BuildNeuralNetwork <TInputDataModel, TOutputDataModel>(options) as RegressionNeuralNetwork <TInputDataModel, TOutputDataModel>);
}
/// <summary>
/// Gets a new instance of <see cref="NeuralNetwork"/> by invoking the JS
/// interop builder appropriate for the configured training task, then wiring
/// the JS runtime into the returned instance.
/// </summary>
/// <typeparam name="TInputDataModel">Input data model consumed by the network.</typeparam>
/// <typeparam name="TOutputDataModel">Output data model produced by the network.</typeparam>
/// <param name="options">Build options; Task selects classification vs regression.</param>
/// <returns>The built network with its JS runtime set.</returns>
/// <exception cref="NotSupportedException">Thrown when <c>options.Task</c> is neither
/// Classification nor Regression.</exception>
public async ValueTask <NeuralNetwork <TInputDataModel, TOutputDataModel> > BuildNeuralNetwork <TInputDataModel, TOutputDataModel>(NeuralNetworkOptions options)
{
    NeuralNetwork <TInputDataModel, TOutputDataModel> retVal;

    // Classification NN
    if (options.Task == NeuralNetworkOptions.TrainingTask.Classification)
    {
        retVal = await JSRuntime.InvokeAsync <ClassificationNeuralNetwork <TInputDataModel, TOutputDataModel> >($"{INTEROP_GLOBAL_VARIABLE}.neuralNetwork.buildNeuralNetwork", options);
    }
    // Regression NN
    else if (options.Task == NeuralNetworkOptions.TrainingTask.Regression)
    {
        retVal = await JSRuntime.InvokeAsync <RegressionNeuralNetwork <TInputDataModel, TOutputDataModel> >($"{INTEROP_GLOBAL_VARIABLE}.neuralNetwork.buildNeuralNetwork", options);
    }
    else
    {
        // Previously retVal stayed null here and the SetJSRuntime call below
        // crashed with a NullReferenceException; fail fast with a clear message.
        throw new NotSupportedException($"Unsupported training task: {options.Task}");
    }

    // Set JS runtime.
    retVal.SetJSRuntime(this.JSRuntime);

    return retVal;
}
/// <summary>
/// Entry point: parses the command line into one of six commands
/// (test / train / view / experiment / features / encoder) and dispatches.
/// </summary>
/// <param name="args">Raw command-line arguments.</param>
static void Main(string[] args)
{
    // Prefer the native MKL backend; fall back silently with a warning.
    if (!Optimizer.TryUseNativeMKL())
    {
        Console.Error.WriteLine("Could not use MKL native library");
    }

    // Parsed command and general flags/paths.
    var command = Command.Help;
    var nnJsonPath = "";
    var isVerbose = false;
    var outputPath = "";
    var imagePath = "";
    var print = false;
    var evaluate = false;
    var dump = false;
    var isEncoder = false;
    var normalize = false;

    //mlp params
    int[] layersSizes = { 70, 100, 10 };
    var learningRate = 0.3;
    var momentum = 0.9;
    double errorThreshold = 0;
    var batchSize = 10;
    var activationFunction = ActivationFunction.Sigmoid;
    var initialWeightsRange = 0.25;
    var lambda = 0.0;
    var takeBest = false;
    var imageWidth = 7;
    var maxEpochs = 200;
    string experimentValues = null;
    var experiment = Experiment.LearningRate;
    var repetitions = 3;

    // Declarative CLI definition: each Command/Parameter/Option mutates the
    // locals above via its callback when the argument is present.
    ICommandLine commandLine = CommandLine
        .Help("h")
        .Help("help")
        // "test": evaluate an existing MLP (default command — note the extra 'true').
        .Command("test", () => command = Command.Test, true, "Test your MLP")
        .DefaultParameter("mlp", json => nnJsonPath = json, "MLP data in json format", "Json")
        .Parameter("image", path => imagePath = path, "Path to image", "Path to image")
        .Option("v", () => isVerbose = true, "Explain what is happening")
        .Option("verbose", () => isVerbose = true, "Explain what is happening")
        .Option("e", () => evaluate = true, "Evaluate using MNIST dataset")
        .Option("evaluate", () => evaluate = true, "Evaluate using MNIST dataset")
        .Option("n", () => normalize = true, "Normalize input")
        .Option("normalize", () => normalize = true, "Normalize input")
        // "train": train a fresh MLP and save it as json.
        .Command("train", () => command = Command.Train, "Train new MLP")
        .DefaultParameter("output", path => outputPath = path, "Output file to save trained mlp")
        .Parameter("sizes", sizes => layersSizes = JsonConvert.DeserializeObject <int[]>(sizes), "Number of layer and its sizes, default to [70,5,10]", "Sizes")
        .Parameter("learning-rate", val => learningRate = double.Parse(val, CultureInfo.InvariantCulture), "Learning rate")
        .Parameter("momentum", val => momentum = double.Parse(val, CultureInfo.InvariantCulture), "Momenum parameter")
        .Parameter("error-threshold", val => errorThreshold = double.Parse(val, CultureInfo.InvariantCulture), "Error threshold to set learning stop criteria")
        .Parameter("max-epochs", val => maxEpochs = int.Parse(val), "Program will terminate learning if reaches this epoch")
        .Parameter("batch-size", val => batchSize = int.Parse(val), "Batch size")
        .Parameter("activation", val => activationFunction = ParseActivationFunction(val), "Activation function, (sigmoid, tanh)")
        .Parameter("initial-weights", val => initialWeightsRange = double.Parse(val, CultureInfo.InvariantCulture), "Initial weights range [number](-number;number)")
        .Parameter("lambda", val => lambda = double.Parse(val, CultureInfo.InvariantCulture), "Lambda param for L2 regularization. Defaults to 0 (no regularization)")
        .Option("v", () => isVerbose = true, "Explain what is happening")
        .Option("verbose", () => isVerbose = true, "Explain what is happening")
        .Option("d", () => dump = true, "Dump training data")
        .Option("dump", () => dump = true, "Dump training data")
        .Option("n", () => normalize = true, "Normalize input")
        .Option("normalize", () => normalize = true, "Normalize input")
        .Option("e", () => isEncoder = true, "Use encoder mode")
        .Option("encoder", () => isEncoder = true, "Use encoder mode")
        .Option("take-best", () => takeBest = true, "Tries to pick best solution from all epochs")
        // "view": dump a single MNIST image to the console.
        .Command("view", () => command = Command.View, "Show MNIST image")
        .DefaultParameter("path", path => imagePath = path, "Path to image")
        .Option("p", () => print = true, "Display grayscale interpretation")
        .Option("print", () => print = true, "Display grayscale interpretation")
        .Option("n", () => normalize = true, "Normalize input")
        .Option("normalize", () => normalize = true, "Normalize input")
        // "experiment": sweep one hyper-parameter over a list of values.
        .Command("experiment", () => command = Command.Experiment, "Run experiment")
        .DefaultParameter("output", path => outputPath = path, "Path to save data")
        .Parameter("values", val => experimentValues = val, "Values to test in experiment", "Experiment values")
        // NOTE(review): description "Momenum parameter" below is a copy/paste
        // slip (it selects the experiment type) — user-facing string left as-is.
        .Parameter("experiment", val => experiment = ParseExperimentType(val), "Momenum parameter")
        .Parameter("sizes", sizes => layersSizes = JsonConvert.DeserializeObject <int[]>(sizes), "Number of layer and its sizes, default to [70,5,10]", "Sizes")
        .Parameter("learning-rate", val => learningRate = double.Parse(val, CultureInfo.InvariantCulture), "Learning rate")
        .Parameter("momentum", val => momentum = double.Parse(val, CultureInfo.InvariantCulture), "Momenum parameter")
        .Parameter("error-threshold", val => errorThreshold = double.Parse(val, CultureInfo.InvariantCulture), "Error threshold to set learning stop criteria")
        .Parameter("max-epochs", val => maxEpochs = int.Parse(val), "Program will terminate learning if reaches this epoch")
        .Parameter("batch-size", val => batchSize = int.Parse(val), "Batch size")
        .Parameter("activation", val => activationFunction = ParseActivationFunction(val), "Activation function, (sigmoid, tanh)")
        .Parameter("repetitions", val => repetitions = int.Parse(val, CultureInfo.InvariantCulture), "Number of repetitions for each value in experiment")
        .Parameter("initial-weights", val => initialWeightsRange = double.Parse(val, CultureInfo.InvariantCulture), "Initial weights range [number](-number;number)")
        .Parameter("lambda", val => lambda = double.Parse(val, CultureInfo.InvariantCulture), "Lambda param for L2 regularization. Defaults to 0 (no regularization)")
        .Option("v", () => isVerbose = true, "Explain what is happening")
        .Option("verbose", () => isVerbose = true, "Explain what is happening")
        .Option("n", () => normalize = true, "Normalize input")
        .Option("normalize", () => normalize = true, "Normalize input")
        .Option("e", () => isEncoder = true, "Use encoder mode")
        .Option("encoder", () => isEncoder = true, "Use encoder mode")
        // "features": render each layer's learned features as PNG images.
        .Command("features", () => command = Command.Features, "Print features")
        .Parameter("mlp", json => nnJsonPath = json, "MLP data in json format", "Json")
        .Parameter("output", path => outputPath = path, "Path to save features")
        .Parameter("width", val => imageWidth = int.Parse(val), "Input width to display feature as image")
        // "encoder": run an image through a trained autoencoder and save the result.
        .Command("encoder", () => command = Command.Encoder, "Use encoder mode")
        .Parameter("mlp", json => nnJsonPath = json, "Encoder data in json format", "Json")
        .Parameter("image", path => imagePath = path, "Path to image", "Path to image")
        .Option("n", () => normalize = true, "Normalize input")
        .Option("normalize", () => normalize = true, "Normalize input")
        .End();

    commandLine.Run(args);

    switch (command)
    {
        case Command.Train:
        {
            // Probe the output path up front so training doesn't run for
            // hours only to fail on save.
            try
            {
                File.Create(outputPath).Close();
            }
            catch (Exception)
            {
                Console.WriteLine($"Path is invalid");
                return;
            }

            //Debugger.Launch();

            var options = new NeuralNetworkOptions(
                learningRate,
                momentum,
                errorThreshold,
                layersSizes,
                isEncoder ? ENCODER_DATA_PATH : TRAINING_DATA_PATH,
                VALIDATION_PATH,
                TEST_DATA_PATH,
                maxEpochs,
                isVerbose,
                batchSize,
                activationFunction,
                initialWeightsRange,
                dump,
                normalize,
                isEncoder,
                lambda,
                takeBest
            );

            var trainingResult = MnistTrainer.TrainOnMnist(options);

            var mlp = trainingResult.NeuralNetwork;

            File.WriteAllText(outputPath, mlp.ToJson());

            if (dump)
            {
                // Write error/evaluation plots next to the saved network,
                // prefixed with its file name.
                var fi = new FileInfo(outputPath);
                var directory = fi.Directory?.FullName ?? "";
                var fileName = Path.GetFileNameWithoutExtension(outputPath);

                directory += $"/{fileName}_";

                ExperimentVisualization.GenerateErrorPlot(trainingResult, $"{directory}error", $"{fileName} - error");

                if (!isEncoder)
                {
                    ExperimentVisualization.GenerateEvaluationPlot(trainingResult, $"{directory}evaluation", $"{fileName} - evaluation");
                }
            }

            break;
        }
        case Command.Test:
        {
            // Load the serialized network; bail out on any read/parse failure.
            NeuralNetwork mlp;

            try
            {
                var json = File.ReadAllText(nnJsonPath);
                mlp = JsonConvert.DeserializeObject <NeuralNetwork>(json);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                Console.WriteLine($"Path is invalid");
                return;
            }

            // Optionally classify a single image.
            if (!string.IsNullOrEmpty(imagePath))
            {
                if (!File.Exists(imagePath))
                {
                    Console.WriteLine($"File {imagePath} does not exist!");
                    return;
                }

                var image = MnistParser.ReadImage(imagePath, normalize, isEncoder);

                var decision = mlp.Compute(image.Values);

                Console.WriteLine($"Result - {decision}");
                Console.WriteLine($"Expected - {image.Label}");
            }

            // Optionally evaluate against the whole MNIST test set.
            if (evaluate)
            {
                var testData = MnistParser.ReadAll(TEST_DATA_PATH, normalize, isEncoder);

                var evaluation = mlp.Evaluate(testData);

                Console.WriteLine($"Solutions - {evaluation.Correct} / {evaluation.All}");
                Console.WriteLine($"Fitness - {evaluation.Percentage}");
            }

            break;
        }
        case Command.View:
        {
            if (string.IsNullOrEmpty(imagePath))
            {
                Console.WriteLine($"Path to image not set");
                return;
            }

            if (!File.Exists(imagePath))
            {
                Console.WriteLine($"File {imagePath} does not exist!");
                return;
            }

            var model = MnistParser.ReadImage(imagePath, normalize, isEncoder);

            var modelDump = MnistViewer.Dump(model);

            Console.WriteLine(modelDump);

            // Optionally render the pixel values as a text matrix.
            if (print)
            {
                var modelMatrix = MnistViewer.ToMatrix(model.Values, model.Width);

                Console.Write(modelMatrix);
            }

            break;
        }
        case Command.Help:
            commandLine.Run("help");
            break;
        case Command.Experiment:
        {
            // Experiments always dump data (the 'true' argument below).
            var options = new NeuralNetworkOptions(
                learningRate,
                momentum,
                errorThreshold,
                layersSizes,
                isEncoder ? ENCODER_DATA_PATH : TRAINING_DATA_PATH,
                VALIDATION_PATH,
                TEST_DATA_PATH,
                maxEpochs,
                isVerbose,
                batchSize,
                activationFunction,
                initialWeightsRange,
                true,
                normalize,
                isEncoder,
                lambda,
                takeBest
            );

            // Deserialize the value list with the element type each sweep expects.
            switch (experiment)
            {
                case Experiment.LearningRate:
                {
                    var values = JsonConvert.DeserializeObject <double[]>(experimentValues);

                    ExperimentRunner.RunLearningRateExperiment(
                        values,
                        options,
                        repetitions,
                        outputPath
                    );

                    break;
                }
                case Experiment.ActivationFunction:
                {
                    var values = JsonConvert.DeserializeObject <ActivationFunction[]>(experimentValues);

                    ExperimentRunner.RunActivationFunctionExperiment(
                        values,
                        options,
                        repetitions,
                        outputPath
                    );

                    break;
                }
                case Experiment.Momentum:
                {
                    var values = JsonConvert.DeserializeObject <double[]>(experimentValues);

                    ExperimentRunner.RunMomentumExperiment(
                        values,
                        options,
                        repetitions,
                        outputPath
                    );

                    break;
                }
                case Experiment.InitialWeights:
                {
                    var values = JsonConvert.DeserializeObject <double[]>(experimentValues);

                    ExperimentRunner.RunInitialWeightsRangeExperiment(
                        values,
                        options,
                        repetitions,
                        outputPath
                    );

                    break;
                }
                case Experiment.Sizes:
                {
                    var values = JsonConvert.DeserializeObject <int[][]>(experimentValues);

                    ExperimentRunner.RunSizeExperiment(
                        values,
                        options,
                        repetitions,
                        outputPath
                    );

                    break;
                }
                default:
                    throw new ArgumentOutOfRangeException();
            }

            break;
        }
        case Command.Features:
        {
            NeuralNetwork mlp;

            try
            {
                var json = File.ReadAllText(nnJsonPath);
                mlp = JsonConvert.DeserializeObject <NeuralNetwork>(json);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                Console.WriteLine($"Path is invalid");
                return;
            }

            var layerFeatures = mlp.GetFeatures();

            // Recreate the output directory from scratch, one subfolder per layer.
            if (Directory.Exists(outputPath))
            {
                Directory.Delete(outputPath, true);
            }

            Directory.CreateDirectory(outputPath);

            for (int layerIndex = 0; layerIndex < layerFeatures.Length; layerIndex++)
            {
                var features = layerFeatures[layerIndex];

                var path = $"{outputPath}/{layerIndex}";

                Directory.CreateDirectory(path);

                for (int i = 0; i < features.Length; i++)
                {
                    var feature = features[i];

                    var image = MnistViewer.ToImage(feature, imageWidth);

                    image.Save($"{path}/{i}.png", ImageFormat.Png);
                }
            }

            break;
        }
        case Command.Encoder:
        {
            Encoder.Network.Encoder encoder;

            try
            {
                var json = File.ReadAllText(nnJsonPath);
                encoder = JsonConvert.DeserializeObject <Encoder.Network.Encoder>(json);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                Console.WriteLine($"Path is invalid");
                return;
            }

            if (!string.IsNullOrEmpty(imagePath))
            {
                if (!File.Exists(imagePath))
                {
                    Console.WriteLine($"File {imagePath} does not exist!");
                    return;
                }

                // Round-trip the image through the encoder and save both
                // the original and the reconstruction for comparison.
                var image = MnistParser.ReadImage(imagePath, normalize, true);

                var recreatedData = encoder.Compute(image.Values);

                var recreatedImage = MnistViewer.ToImage(recreatedData, imageWidth);

                File.Copy(imagePath, "original.png", true);

                recreatedImage.Save($"decoded.png", ImageFormat.Png);
            }

            break;
        }
        default:
            throw new ArgumentOutOfRangeException();
    }
}
/// <summary>
/// Runs the layer-sizes experiment: for every sizes array, trains
/// <paramref name="repetitions"/> networks on MNIST, saves each trained
/// network as json, writes a pipe-separated CSV of per-epoch evaluation and
/// error, and generates plots.
/// </summary>
/// <param name="sizesArray">Layer-size configurations to sweep over.</param>
/// <param name="options">Base training options; its ErrorThreshold is zeroed
/// so every run lasts the full epoch count. Mutated in place.</param>
/// <param name="repetitions">Number of training runs per configuration.</param>
/// <param name="logPath">Base path for all produced logs/files; its first
/// path segment is treated as the experiment directory.</param>
public static void RunSizeExperiment(
    int[][] sizesArray,
    NeuralNetworkOptions options,
    int repetitions,
    string logPath
)
{
    //disable early learning end
    options.ErrorThreshold = 0;

    // NOTE(review): isVerbose is assigned but never used in this method.
    var isVerbose = options.IsVerbose;

    // Wipe and recreate the experiment directory (first segment of logPath).
    var mainDir = logPath.Split('/')[0];

    if (Directory.Exists(mainDir))
    {
        ClearDirectory(mainDir);
    }

    Directory.CreateDirectory(mainDir);

    for (var i = 0; i < sizesArray.Length; i++)
    {
        var sizes = sizesArray[i];
        var serializedSizes = JsonConvert.SerializeObject(sizes);

        Console.WriteLine($"Running experiment for {serializedSizes}");

        // Per-run options: base settings with this iteration's sizes and
        // dumping forced on (the 'true' argument).
        var trainingOptions = new NeuralNetworkOptions(
            options.LearningRate,
            options.Momentum,
            options.ErrorThreshold,
            sizes,
            options.TrainingPath,
            options.ValidationPath,
            options.TestPath,
            options.MaxEpochs,
            options.IsVerbose,
            options.BatchSize,
            options.ActivationFunction,
            options.InitialWeightsRange,
            true,
            options.NormalizeInput,
            options.IsEncoder,
            options.Lambda,
            options.TakeBest
        );

        #region dump used params
        //lel
        // NOTE(review): serializes the base 'options' (not 'trainingOptions')
        // and rewrites the same file every iteration — confirm intended.
        var dumpling = JsonConvert.SerializeObject(options, Formatting.Indented);
        File.WriteAllText(logPath + ".log", dumpling);
        #endregion

        var trainingResponses = new TrainingResult[repetitions];

        var runLogPath = logPath + "/" + serializedSizes;

        Directory.CreateDirectory(runLogPath);

        //gather data
        for (var j = 0; j < repetitions; j++)
        {
            var trainingResponse = MnistTrainer.TrainOnMnist(trainingOptions);

            trainingResponses[j] = trainingResponse;

            File.WriteAllText($"{runLogPath}/{serializedSizes}_{j}.json", trainingResponse.NeuralNetwork.ToJson());
        }

        var fileName = logPath + "_" + serializedSizes;

        //log data
        // CSV layout: "sep=|" header (Excel hint), then one column pair
        // (evaluation, error) per repetition, one row per epoch.
        var path = fileName + ".csv";
        //File.Create(path);
        var log = new StringBuilder("sep=|");

        log.AppendLine();
        log.Append("epoch");

        for (var j = 0; j < trainingResponses.Length; j++)
        {
            log.Append("|evaluation_" + j + "|error_" + j);
        }

        log.AppendLine();

        // NOTE(review): assumes every repetition ran the same number of
        // epochs as the first response — holds since ErrorThreshold is 0.
        for (var j = 0; j < trainingResponses[0].Epochs; j++)
        {
            log.Append(j);

            for (var n = 0; n < trainingResponses.Length; n++)
            {
                var result = trainingResponses[n];

                log.Append("|" + result.Evaluations[j].Percentage + "|" + result.EpochErrors[j]);
            }

            log.AppendLine();
        }

        File.WriteAllText(path, log.ToString());

        #region dump plot
        if (!options.IsEncoder)
        {
            ExperimentVisualization.GenerateEvaluationPlot(trainingResponses, fileName, fileName);
        }

        ExperimentVisualization.GenerateErrorPlot(trainingResponses, fileName, fileName);
        #endregion
    }
}