public void SimpleNeuralNetworkTest()
{
    // Load the first 100 handwritten-digit samples and their labels,
    // train a 400-25-10 network on them, then run one prediction pass.
    const int sampleCount = 100;

    var featurePath = FileManagerTest.GetTestDataByName("HandwrittenDigit", "dataX.csv");
    var featureRows = new CsvDoubleReader().GetData(featurePath, ",");
    var features = new Matrix(featureRows.Take(sampleCount));

    var labelPath = FileManagerTest.GetTestDataByName("HandwrittenDigit", "datay.csv");
    var labelRows = new CsvIntReader().GetData(labelPath, ",").Take(sampleCount).ToArray();
    // Labels arrive one per row; expand them into a 10-class one-hot matrix.
    var labels = MatrixUtils.VectorToBinaryMatrix(labelRows.Select(row => row[0]).ToArray(), 10);

    var parameters = new SimpleNeuralNetworkParameters()
    {
        Alpha = 1,
        HiddenLayerSize = 25,
        InputLayerSize = 400,
        IterationCount = 10,
        LabelCount = 10,
        Lambda = 1,
        X = features,
        Y = labels,
    };

    var network = new SimpleNeuralNetwork();
    var results = network.Compute(parameters);
    var prediction = network.Predict(results.Theta1, results.Theta2, features, features.RowCount);
}
// Trains the network online over the learn data: each sample's input vector
// is paired with the LAST component of the NEXT sample's vector as the ideal
// (one-step-ahead prediction of that component). Returns the per-step learn
// log; optionally persists it alongside the trained network.
protected List <PredictionOfCurrencyLearnResult> FitSimpleNeuralNetwork(bool saveLearnResult = false)
{
    InitData();
    InitSimpleNeuralNetwork();
    var result = new List <PredictionOfCurrencyLearnResult>();
    // Stop one short of the end: each step needs sample i+1 for the ideal.
    for (var i = 0; i < DataManager.LearnData.Data.Count - 1; i++)
    {
        var input = DataManager.LearnData[i].Vector;
        var ideal = (Vector) new double[] { DataManager.LearnData[i + 1].Vector.Values.Last() };
        // The same ideal is built twice; presumably Learn mutates or consumes
        // its argument, so a fresh copy is kept for reporting — TODO confirm.
        var ideal1 = (Vector) new double[] { DataManager.LearnData[i + 1].Vector.Values.Last() };
        var (output, error) = SimpleNeuralNetwork.Learn(input, ideal);
        result.Add(new PredictionOfCurrencyLearnResult
        {
            Date = DataManager.LearnData.Data[i].Date,
            Error = error,
            // Outputs are de-normalised back to domain units for the report.
            Output = DataManager.ConvertOutput(SimpleNeuralNetwork.ConvertOutput(output)),
            Ideal = DataManager.ConvertOutput(ideal1),
            Input = DataManager.ConvertInput(input),
        });
    }
    // Persist the trained weights, and optionally the per-step learn log.
    SimpleNeuralNetwork.Save(NeuralNetworkName);
    if (saveLearnResult)
    {
        SaveLearnProgress(result);
    }
    return (result);
}
public void Train_RuningTraining_NetworkIsTrained()
{
    // Build a 3-input network with one hidden ReLU layer and one sigmoid
    // output neuron, train it on seven labelled samples, then query it.
    var network = new SimpleNeuralNetwork(3);
    var factory = new NeuralLayerFactory();
    network.AddLayer(factory.CreateNeuralLayer(3, new RectifiedActivationFuncion(), new WeightedSumFunction()));
    network.AddLayer(factory.CreateNeuralLayer(1, new SigmoidActivationFunction(0.7), new WeightedSumFunction()));

    double[][] expected =
    {
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 1 },
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 0 },
        new double[] { 0 },
    };
    network.PushExpectedValues(expected);

    double[][] trainingSamples =
    {
        new double[] { 150, 2, 0 },
        new double[] { 1002, 56, 1 },
        new double[] { 1060, 59, 1 },
        new double[] { 200, 3, 0 },
        new double[] { 300, 3, 1 },
        new double[] { 120, 1, 0 },
        new double[] { 80, 1, 0 },
    };
    network.Train(trainingSamples, 10000);

    // Query with an input close to the second training sample.
    network.PushInputValues(new double[] { 1054, 54, 1 });
    var outputs = network.GetOutput();
}
private void InitializeDataAndNeurons()
{
    // Normalise both the training and the test CSVs, then build a network
    // whose layer layout depends on regression vs. classification mode.
    NormalizeData(csvPath, csvPathNormalized);
    NormalizeData(csvPathTest, csvPathNormalizedTest);

    Network = new SimpleNeuralNetwork((double)learningRate, (double)momentumRate, bias);

    if (isRegression)
    {
        DataPointsRegressionTraining = new ImportDataPointsSetsRegression(csvPathNormalized).DataPoints;
        DataPointRegressionTest = new ImportDataPointsSetsRegression(csvPathNormalizedTest).DataPoints;
        Network.TrainingSet = Network.InitializeRegressionSet(DataPointsRegressionTraining);
        Network.TestSet = Network.InitializeRegressionSet(DataPointRegressionTest);

        // Regression: one input neuron, configurable hidden layers, one output.
        Network.AddLayer(1);
        Network.AddLayerBunch(Layers, Neurons);
        Network.AddLayer(1);
    }
    else
    {
        DataPointsClassificationTraining = new ImportDataPointSets(csvPathNormalized).DataPoints;
        DataPointClassificationTest = new ImportDataPointSets(csvPathNormalizedTest).DataPoints;
        Network.TrainingSet = Network.InitializeClassificationSet(DataPointsClassificationTraining, 4);
        Network.TestSet = Network.InitializeClassificationSet(DataPointClassificationTest, 4);

        // Classification: two input neurons, configurable hidden layers, four outputs.
        Network.AddLayer(2);
        Network.AddLayerBunch(Layers, Neurons);
        Network.AddLayer(4);
    }
}
// Trains a tiny 2-4-4 network on 16 fixed 2-feature samples with
// pre-seeded weights, exercising Compute end to end with deterministic
// initial conditions.
public void SimpleNeuralNetworkTest2()
{
    var x = new double[, ] {
        { 0.1683, -0.1923 }, { 0.1819, -0.1502 }, { 0.0282, 0.0300 }, { -0.1514, 0.1826 },
        { -0.1918, 0.1673 }, { -0.0559, -0.0018 }, { 0.1314, -0.1692 }, { 0.1979, -0.1811 },
        { 0.0824, -0.0265 }, { -0.1088, 0.1525 }, { -0.2000, 0.1913 }, { -0.1073, 0.0542 },
        { 0.0840, -0.1327 }, { 0.1981, -0.1976 }, { 0.1301, -0.0808 }, { -0.0576, 0.1103 }
    };
    // Class labels 1..4 for each row, expanded to a one-hot matrix.
    var y = new int[] { 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1 };
    var yMat = MatrixUtils.VectorToBinaryMatrix(y);
    // Initial weights: theta1 is 4x3, theta2 is 4x5 — the extra column in
    // each presumably holds bias weights; confirm against SimpleNeuralNetwork.
    var theta1 = new double[, ] {
        { 0.0181, 0.0771, 0.0358 },
        { -0.1057, -0.1163, 0.0556 },
        { -0.0637, -0.1097, 0.0355 },
        { -0.0352, -0.0794, -0.0118 }
    };
    var theta2 = new double[, ] {
        { 0.0113, 0.0448, 0.0673, -0.0032, 0.0020 },
        { -0.0489, -0.0760, -0.1005, -0.0154, 0.0026 },
        { 0.0587, -0.0316, 0.1031, -0.0128, 0.0762 },
        { -0.0747, 0.0301, 0.0662, -0.0465, 0.0708 }
    };
    var neuralNetworkParameters = new SimpleNeuralNetworkParameters()
    {
        Alpha = 1,
        HiddenLayerSize = 4,
        InputLayerSize = 2,
        IterationCount = 50,
        LabelCount = 4,
        Lambda = 1,
        X = new Matrix(x),
        Y = yMat,
        Theta1Init = new Matrix(theta1),
        Theta2Init = new Matrix(theta2)
    };
    var neuralNetwork = new SimpleNeuralNetwork();
    // NOTE(review): the result is discarded and nothing is asserted here.
    var neuralNetworkResults = neuralNetwork.Compute(neuralNetworkParameters);
}
public void PushInputValues_ValuesSentToNetwork_ValuesSetOnInput()
{
    // Arrange: a network with three input neurons.
    var network = new SimpleNeuralNetwork(3);

    // Act: push one value per input neuron.
    network.PushInputValues(new double[] { 3, 5, 7 });

    // Assert: the first input neuron now reports the first pushed value.
    var firstInput = network._layers.First().Neurons.First().Inputs.First();
    Assert.AreEqual(3, firstInput.GetOutput());
}
public AgentSmith()
{
    // An 11-input network: one hidden ReLU layer of 10 neurons feeding
    // a single sigmoid output neuron.
    var factory = new NeuralLayerFactory();
    _network = new SimpleNeuralNetwork(11);
    _network.AddLayer(factory.CreateNeuralLayer(10, new RectifiedActivationFuncion(), new WeightedSumFunction()));
    _network.AddLayer(factory.CreateNeuralLayer(1, new SigmoidActivationFunction(1), new WeightedSumFunction()));
}
public void AddLayer_NeuralAddingNewLayer_LayerAdded()
{
    // Arrange: a three-input network (the constructor creates the input layer).
    var network = new SimpleNeuralNetwork(3);
    var layerFactory = new NeuralLayerFactory();

    // Act: append one hidden layer.
    network.AddLayer(layerFactory.CreateNeuralLayer(3, new RectifiedActivationFuncion(), new WeightedSumFunction()));

    // Assert: input layer plus the newly added one.
    Assert.AreEqual(2, network._layers.Count);
}
public void PushInputValues_ValuesSentToNetwork_ValuesSetOnInput()
{
    // Arrange: a network with six input neurons.
    var network = new SimpleNeuralNetwork(6);

    // Act: push one value per input neuron.
    network.PushInputValues(new double[] { 23, 565, 789, 3, 90, 23 });

    // Assert: the first input neuron reports the first pushed value,
    // then dump the whole network for manual inspection.
    var firstInput = network._layers.First().Neurons.First().Inputs.First();
    Assert.AreEqual(23, firstInput.GetOutput());
    Console.WriteLine(JsonConvert.SerializeObject(network, Formatting.Indented));
}
// Restores this solver from the binary stream; fields must be read in
// exactly the order the matching Save wrote them.
public override void Load(BinaryReader r)
{
    base.Load(r);
    charsPerImage = r.ReadInt32();
    // The character set was persisted as a single string; split it back
    // into a list of one-character strings.
    charsSet = r.ReadString().ToCharArray().ToList().Select(c => c.ToString()).ToList();
    learnRate = r.ReadDouble();
    // Rebuild the network, then re-attach the progress callback —
    // event subscriptions are runtime state, not part of the stream.
    sann = SimpleNeuralNetwork.Load(r);
    sann.OnTrainingProgressChange += new SimpleNeuralNetwork.TrainingProgressHandler(sann_OnTrainingProgressChange);
}
public void Initialization_Constructor_NeuralNetworkInitialized()
{
    var network = new SimpleNeuralNetwork(6);

    // The constructor is expected to create the input layer itself,
    // sized to the requested neuron count, with the default learning rate.
    Assert.AreEqual(1, network._layers.Count);
    Assert.AreEqual(6, network._layers.First().Neurons.Count);
    Assert.AreEqual(2.95, network._learningRate);
}
// Unity entry point: the agent starts idle with no verification dialog open
// and a fresh network mapping menu items (inputs) to ingredients (outputs).
void Start()
{
    this.idle = true;
    this.verificationMenu = null;
    //this.employeeName = "Anon";
    // Earlier experiment with an explicit hidden-layer size, kept for reference:
    //this.network = new SimpleNeuralNetwork((int) Menu.items.Count, new int[]{Mathf.FloorToInt(Mathf.Max((float) Menu.items.Count, (float) Recipes.ingredients.Count))}, (int) Recipes.ingredients.Count);
    this.network = new SimpleNeuralNetwork((int)Menu.items.Count, (int)Recipes.ingredients.Count);
    // Navigation state: nodes still to walk and pending destination ids.
    this.path = new Stack <Node>();
    this.destinations = new Stack <int>();
}
public void PushExpectedResults_ValuesSentToNetwork_ValuesStored()
{
    var network = new SimpleNeuralNetwork(3);

    network.PushExpectedValues(new double[][] { new double[] { 3, 5, 7 } });

    // The expected-result matrix stores the pushed row unchanged.
    var storedRow = network._expectedResult[0];
    Assert.AreEqual(3, storedRow[0]);
    Assert.AreEqual(5, storedRow[1]);
    Assert.AreEqual(7, storedRow[2]);
}
public void AddLayer_NeuralAddingNewLayer_LayerAdded()
{
    // Arrange: a six-input network with input values already pushed.
    var network = new SimpleNeuralNetwork(6);
    var layerFactory = new NeuralLayerFactory();
    network.PushInputValues(new double[] { 23, 565, 789, 3, 90, 23 });

    // Act: append one hidden layer.
    network.AddLayer(layerFactory.CreateNeuralLayer(3, new RectifiedActivationFuncion(), new WeightedSumFunction()));

    // Assert: input layer plus the new one, then dump for inspection.
    Assert.AreEqual(2, network._layers.Count);
    Console.WriteLine(JsonConvert.SerializeObject(network, Formatting.Indented));
}
// Restores this multi-network solver from the binary stream; read order
// must mirror the matching Save exactly.
public override void Load(BinaryReader r)
{
    base.Load(r);
    charsPerImage = r.ReadInt32();
    // Character set was stored as one string; split into one-char strings.
    charsSet = r.ReadString().ToCharArray().ToList().Select(c => c.ToString()).ToList();
    learnRate = r.ReadDouble();
    // Each specialised net was written as: network blob, solution string,
    // last output value — in that order.
    int count = r.ReadInt32();
    sann = new List <SpecialNeuralNet>();
    for (int i = 0; i < count; i++)
    {
        sann.Add(new SpecialNeuralNet());
        sann[i].NeuralNet = SimpleNeuralNetwork.Load(r);
        // Progress callback is runtime state, not serialized; re-attach it.
        sann[i].NeuralNet.OnTrainingProgressChange += new SimpleNeuralNetwork.TrainingProgressHandler(sann_OnTrainingProgressChange);
        sann[i].Solution = r.ReadString();
        sann[i].LastOutput = r.ReadDouble();
    }
}
// Builds a 2 / (2x4 hidden) / 4 bipolar network from the normalised CSV data
// and runs the requested number of learning iterations.
// FIX: removed the unused local `var ts = myNetwork.TrainingSet;` — its value
// was never read (the training-set initialisation call is commented out).
public static void InitializeSimpleNetwork(int interations)
{
    NormalizeData();
    // Imported here for its side effect of reading the normalised file;
    // the points are only consumed by the commented-out call below.
    List <DataPointCls> points = (new ImportDataPointSets(csvPathNormalized).DataPoints);

    SimpleNeuralNetwork myNetwork = new SimpleNeuralNetwork();
    // NOTE(review): the training set is never initialised here; the original
    // call is kept below for reference. Verify StartLearning tolerates this.
    //myNetwork.InitializeTrainingSet(points,4);
    myNetwork.ActivationFunction = new ActivationBiPolar();

    // Two input neurons, two hidden layers of four neurons, four outputs.
    myNetwork.AddLayer(2);
    myNetwork.AddLayerBunch(2, 4);
    myNetwork.AddLayer(4);

    myNetwork.StartLearning(interations);
    //ErrorCalculator.CalculateError(myNetwork.ComputeTrainingSet().ToList(), myNetwork);
}
// Trains a tiny network to predict the parity of the sum of two random
// integers, then queries it once and prints the result.
static void Main(string[] args)
{
    var network = new SimpleNeuralNetwork(1);
    var layerFactory = new NeuralLayerFactory();
    network.AddLayer(layerFactory.CreateNeuralLayer(2, new RectifiedActivationFuncion(), new WeightedSumFunction()));
    network.AddLayer(layerFactory.CreateNeuralLayer(1, new SigmoidActivationFunction(0.4), new WeightedSumFunction()));

    double[][] expectedValues = new double[samples][];
    double[][] trainingValues = new double[samples][];

    // BUG FIX: the original constructed two new Random instances on EVERY
    // loop iteration. Time-seeded generators created in quick succession
    // produce identical sequences, so the samples were heavily correlated.
    // One generator hoisted out of the loop gives independent draws.
    var rng = new Random();
    for (int i = 0; i < samples; i++)
    {
        int val1 = rng.Next(rng.Next() % 1000);
        int val2 = rng.Next(i % 900);
        // Label = parity of the sum (0 or 1).
        expectedValues[i] = new double[] { (val1 + val2) % 2 };
        trainingValues[i] = new double[] { val1, val2 };
        Console.WriteLine($"val1: {val1} val2: {val2} sum: { (val1 + val2) % 2 }");
    }

    network.PushExpectedValues(expectedValues);
    network.Train(trainingValues, 5000);

    network.PushInputValues(new double[] { 1054, 54 });
    var outputs = network.GetOutput();
    Console.WriteLine($"network output: {string.Join(", ", outputs)}");
    Console.ReadKey();
}
static void Train_predict_and_save(string filepath)
{
    // XOR samples: Item1 holds the two inputs, Item2 the expected output.
    var training_data = new Dictionary <int, Tuple <float[], float[]> >
    {
        [0] = Tuple.Create(new float[] { 0, 1 }, new float[] { 1 }),
        [1] = Tuple.Create(new float[] { 1, 0 }, new float[] { 1 }),
        [2] = Tuple.Create(new float[] { 0, 0 }, new float[] { 0 }),
        [3] = Tuple.Create(new float[] { 1, 1 }, new float[] { 0 })
    };

    // Two inputs, learning rate 0.4, sigmoid activation; 4 hidden, 1 output.
    var snn = new SimpleNeuralNetwork(2, 0.4f, Activation.FunctionsEnum.Sigmoid);
    snn.Add(4);
    snn.Add(1);

    // Train on 100k randomly selected samples.
    Console.WriteLine("Entrenamiento:\n");
    Random random = new Random();
    Console.WriteLine("Training ...");
    for (int i = 0; i < 100000; i++)
    {
        var sample = training_data[random.Next(4)];
        snn.Train(sample.Item1, sample.Item2);
    }

    // Predict each of the four input patterns and print the raw outputs.
    Console.WriteLine("\nPredicciones:\n");
    for (int i = 0; i < 4; i++)
    {
        var res = snn.Predict(training_data[i].Item1);
        Console.WriteLine(string.Format("xs [ {0}, {1} ] = {2}", training_data[i].Item1[0], training_data[i].Item1[1], res[0]));
    }

    // Persist the trained network to disk.
    SimpleNeuralNetwork.Save(snn, filepath);
    Console.WriteLine("\nRed Neuronal guardada !!.\n");
}
static void Main(string[] args)
{
    // XOR truth table:
    //   0,0 -> 0   0,1 -> 1   1,0 -> 1   1,1 -> 0
    var dataset = new XORSample[]
    {
        new XORSample { A = 0, B = 0, Label = 0 },
        new XORSample { A = 0, B = 1, Label = 1 },
        new XORSample { A = 1, B = 0, Label = 1 },
        new XORSample { A = 1, B = 1, Label = 0 },
    };

    // A 2-3-1 sigmoid network; Build finalises the topology.
    var net = new SimpleNeuralNetwork(ActivationFunctions.Sigmoid);
    net.AddLayer(2);
    net.AddLayer(3);
    net.AddLayer(1);
    net.Build();

    // Untrained forward passes: print the raw output next to the label.
    foreach (var sample in dataset)
    {
        float[] inputs = { (float)sample.A, (float)sample.B };
        var output = net.ForwardPass(inputs);
        Console.WriteLine($"Forward pass: {sample.A}, {sample.B}. Output: {output[0]}. Label: {sample.Label}");
    }

    Console.ReadKey();
}
public SimpleNeuralNetSolver(string characterSet, int imageWidth, int imageHeight, int hiddenNeurons, int charactersPerImage, double learningRate)
{
    // Split the recognisable characters into a list of single-char strings.
    charsSet = characterSet.ToCharStringList();
    charsPerImage = charactersPerImage;
    learnRate = learningRate;
    ExpectedWidth = imageWidth;
    ExpectedHeight = imageHeight;

    // One input per pixel, one output per recognisable character.
    sann = new SimpleNeuralNetwork(imageWidth * imageHeight, hiddenNeurons, characterSet.Length, learningRate);
    sann.OnTrainingProgressChange += sann_OnTrainingProgressChange;

    // Background worker that runs training and reports progress.
    trainerWorker = new BackgroundWorker { WorkerReportsProgress = true };
    trainerWorker.DoWork += worker_DoWork;
    trainerWorker.RunWorkerCompleted += worker_RunWorkerCompleted;
    trainerWorker.ProgressChanged += worker_ProgressChanged;

    // Background worker that evaluates the network against test data.
    testerWorker = new BackgroundWorker();
    testerWorker.DoWork += testerWorker_DoWork;
    testerWorker.RunWorkerCompleted += testerWorker_RunWorkerCompleted;
}
static void Load_and_predict(string filepath)
{
    // The same XOR samples used for training; here only the inputs are used.
    var training_data = new Dictionary <int, Tuple <float[], float[]> >
    {
        [0] = Tuple.Create(new float[] { 0, 1 }, new float[] { 1 }),
        [1] = Tuple.Create(new float[] { 1, 0 }, new float[] { 1 }),
        [2] = Tuple.Create(new float[] { 0, 0 }, new float[] { 0 }),
        [3] = Tuple.Create(new float[] { 1, 1 }, new float[] { 0 })
    };

    // Restore the previously persisted network from disk.
    var snn = SimpleNeuralNetwork.Load(filepath);
    Console.WriteLine("Red Neuronal cargada !!.\n");

    // Predict each input pattern and print the raw outputs.
    Console.WriteLine("\nPredicciones:\n");
    for (int i = 0; i < 4; i++)
    {
        var res = snn.Predict(training_data[i].Item1);
        Console.WriteLine(string.Format("xs [ {0}, {1} ] = {2}", training_data[i].Item1[0], training_data[i].Item1[1], res[0]));
    }
}
public void TrainNetwork_6Inputs_3HiddenLayer_2Outputs()
{
    // Topology: 6 inputs -> 3 sigmoid hidden neurons -> 2 lazy-output neurons.
    var network = new SimpleNeuralNetwork(6, 1.95);
    var layerFactory = new NeuralLayerFactory();
    network.AddLayer(layerFactory.CreateNeuralLayer(3, new SigmoidActivationFunction(0.7), new WeightedSumFunction()));
    network.AddLayer(layerFactory.CreateNeuralLayer(2, new LazyOutputFunction(), new WeightedSumFunction()));

    // Four distinct samples, each listed twice.
    double[][] expected =
    {
        new double[] { 0.25, 0.20 },
        new double[] { 0.10, 0.05 },
        new double[] { 0.16, 0.30 },
        new double[] { 0.30, 0.10 },
        new double[] { 0.25, 0.20 },
        new double[] { 0.10, 0.05 },
        new double[] { 0.16, 0.30 },
        new double[] { 0.30, 0.10 },
    };
    network.PushExpectedValues(expected);

    double[][] trainingSamples =
    {
        new double[] { 150, 0, 0, 34, 35, 56 },
        new double[] { 190, 23, 56, 0, 29, 529 },
        new double[] { 290, 3, 108, 24, 189, 20 },
        new double[] { 290, 67, 6, 0, 1, 0 },
        new double[] { 150, 0, 0, 34, 35, 56 },
        new double[] { 190, 23, 56, 0, 29, 529 },
        new double[] { 290, 3, 108, 24, 189, 20 },
        new double[] { 290, 67, 6, 0, 1, 0 },
    };
    network.Train(trainingSamples, 10000);

    // Query with the first training sample and dump the whole network.
    network.PushInputValues(new double[] { 150, 0, 0, 34, 35, 56 });
    var outputs = network.GetOutput();
    Console.WriteLine(outputs[0].ToString() + " " + outputs[1].ToString() + "\n\n" + JsonConvert.SerializeObject(network, Formatting.Indented));
}
// public void setNeuralNetwork(RNN network, string cameraPosition){
//  setNeuralNetwork(network.getNetwork(), cameraPosition);
// }

// Builds a visual representation of the given network: spawns a dedicated
// viewport camera, lays out one prefab instance per neuron, and connects
// consecutive layers with weighted edges.
public void setNeuralNetwork(SimpleNeuralNetwork network, string cameraPosition)
{
    int numberOfLayers = network.getNumberofLayers();
    // The view camera lives far from the main scene, so only the network
    // geometry instantiated around it is visible through it.
    cameraObject = Instantiate(neuralNetworkViewCameraPrefab, new Vector3(1000f, 1000f, -10f), Quaternion.identity);
    camera = cameraObject.GetComponent <Camera>();
    // Dock the viewport according to the requested screen position.
    if (cameraPosition == "left") {
        camera.rect = new Rect(0.0f, 0.0f, 0.34f, 1.0f);
    } else if (cameraPosition == "top-left") {
        camera.rect = new Rect(0.0f, 0.3f, 0.3f, 0.4f);
    } else if (cameraPosition == "center") {
        camera.rect = new Rect(0.3f, 0.3f, 0.4f, 0.4f);
    } else {
        camera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
    }
    // World-space size of the camera's view: (width, height).
    cameraBounds = new Vector2(camera.orthographicSize * 2 * camera.aspect, camera.orthographicSize * 2);
    this.network = network;
    neurons = new List <List <GameObject> >();
    // Collect layer sizes to find the densest layer for scaling.
    int[] neuronDimensions = new int[numberOfLayers + 1];
    for (int i = 0; i <= numberOfLayers - 1; i++) {
        neuronDimensions[i] = network.getLayerSize(i);
    }
    // NOTE(review): the extra slot holds the layer COUNT, not a layer size —
    // confirm this is intentional before it feeds Mathf.Max below.
    neuronDimensions[neuronDimensions.Length - 1] = numberOfLayers;
    int largestDimension = Mathf.Max(neuronDimensions);
    largestDimension = largestDimension * 2 + 2;
    // Uniform neuron scale so the densest layer still fits on screen.
    neuronScale = new Vector3(Mathf.Min(cameraBounds.x, cameraBounds.y) / largestDimension, Mathf.Min(cameraBounds.x, cameraBounds.y) / largestDimension, Mathf.Min(cameraBounds.x, cameraBounds.y) / largestDimension);
    List <GameObject> layer = new List <GameObject>();
    GameObject tempNeuron;
    Vector3 tempNeuronLocation;
    //The origin is the bottom left corner of camera space
    Vector3 origin = new Vector3(camera.transform.position.x - (cameraBounds.x / 2), camera.transform.position.y - (cameraBounds.y / 2), 0.0f);
    //The spacing between network layers whether separated along the x or y axis;
    float layerSpacing = (cameraBounds.x > cameraBounds.y) ?
                         cameraBounds.x / (1 + numberOfLayers) : cameraBounds.y / (1 + numberOfLayers);
    // Instantiate one neuron object per unit; layers run along the longer axis.
    for (int i = 0; i < numberOfLayers; i++) {
        // NOTE(review): layer 0 uses the input count while neuronDimensions[0]
        // above used getLayerSize(0) — verify the two agree.
        int layerSize = (i == 0) ? network.getNumberofInputs() : network.getLayerSize(i);
        float neuronSpacing = (cameraBounds.y / (1 + layerSize));
        for (int j = 0; j < layerSize; j++) {
            tempNeuronLocation = (cameraBounds.x >= cameraBounds.y) ?
                                 new Vector3(origin.x + (i + 1) * layerSpacing, origin.y + (j + 1) * neuronSpacing, 0.0f) :
                                 new Vector3(origin.x + (j + 1) * neuronSpacing, origin.y + (i + 1) * layerSpacing, 0.0f);
            tempNeuron = (GameObject)Instantiate(neuronPrefab, tempNeuronLocation, Quaternion.identity, gameObject.transform);
            tempNeuron.transform.localScale = neuronScale;
            // Colour the neuron by its current activation value.
            tempNeuron.GetComponent <UnityNeuron>().setColor(network.getNeuronValue(i, j));
            layer.Add(tempNeuron);
        }
        neurons.Add(layer);
        layer = new List <GameObject>();
    }
    // Wire each neuron to every neuron in the previous layer, visualising
    // the connection by its weight.
    for (int i = 1; i < neurons.Count; i++) {
        for (int j = 0; j < neurons[i].Count; j++) {
            float[] weights = new float[0];
            try{
                weights = network.getNeuronWeights(i, j);
            }
            catch (System.ArgumentOutOfRangeException) {
                // Re-throw with view-specific context about which indices failed.
                throw new System.ArgumentException("NeuralNetworkView::setNetwork ~ network weight set indices out of range\nLayer: " + i.ToString() + ", Neuron: " + j.ToString());
            }
            for (int k = 0; k < neurons[i - 1].Count; k++) {
                connectNeurons(neurons[i - 1][k], neurons[i][j], weights[k]);
            }
        }
    }
    setNeuronActivationFunctions(network.getActivationFunctions());
}
// Identity task: teach a 2-25-1 sigmoid network to echo its second input
// (the first input is held at 0), then spot-check a few points.
// FIX: the eleven hand-written trainingData.Add lines are generated in a
// loop (i / 10f reproduces the original literals exactly — IEEE float
// division is correctly rounded — and insertion order is unchanged, so
// ElementAt draws from the same sequence). The dead commented-out XOR
// training block was removed.
static void TestNeuralNetworks()
{
    SimpleNeuralNetwork brain = new SimpleNeuralNetwork(2, 25, 1,
        ActivationFunctions.Sigmoid, ActivationFunctions.Sigmoid,
        ActivationFunctions.Derivatives.Sigmoid, ActivationFunctions.Derivatives.Sigmoid)
    {
        AddBias = true,
        LearningRate = 0.01f,
    };

    // Training pairs (0, v) -> v for v = 0.0, 0.1, ..., 1.0.
    Dictionary <float[], float[]> trainingData = new Dictionary <float[], float[]>();
    for (int i = 0; i <= 10; i++)
    {
        float v = i / 10f;
        trainingData.Add(new float[] { 0, v }, new float[] { v, });
    }

    // 100k backpropagation steps on randomly chosen pairs.
    int length = trainingData.Count;
    for (int i = 0; i < 100000; i++)
    {
        var ctd = trainingData.ElementAt(RandomGenerator.GetInt(length));
        brain.Backpropagation(ctd.Key, ctd.Value);
    }
    WriteLine("Training end!");
    WriteLine();

    // Spot-check a few points; outputs are rounded to one decimal place.
    WriteLine("0.0");
    Get(0, 0.0f);
    WriteLine("0.2");
    Get(0, 0.2f);
    WriteLine("0.4");
    Get(0, 0.4f);
    WriteLine("0.6");
    Get(0, 0.6f);
    WriteLine("0.8");
    Get(0, 0.8f);

    // Local helper: run a forward pass and print the rounded output.
    void Get(float x0, float x1)
    {
        float[] output = brain.Feedforward(new float[] { x0, x1 });
        WriteLine(Math.Round(output[0], 1));
        WriteLine();
    }
}
// Unity entry point: starts in training mode and builds the order-taking UI,
// the employee network, and its on-screen visualisation.
void Start()
{
    train = true;
    run = false;
    // Only these three dishes are on today's menu.
    Menu.setMenu(new int[] { (int)Menu.items.Gnocchi, (int)Menu.items.Pizza, (int)Menu.items.Frittata });
    recipes = new Recipes();
    // Earlier topologies with explicit hidden layers, kept for reference:
    //employee = new SimpleNeuralNetwork((int) Menu.items.Count, new int[]{Mathf.FloorToInt(Mathf.Sqrt(Mathf.Max((float) Menu.items.Count, (float) Recipes.ingredients.Count)))}, (int) Recipes.ingredients.Count);
    //employee = new SimpleNeuralNetwork((int) Menu.items.Count, new int[]{Mathf.FloorToInt(Mathf.Max((float) Menu.items.Count, (float) Recipes.ingredients.Count))}, (int) Recipes.ingredients.Count);
    //employee = new SimpleNeuralNetwork((int) Menu.items.Count, Functions.initArray(3,Mathf.FloorToInt(Mathf.Max((float) Menu.items.Count, (float) Recipes.ingredients.Count))), (int) Recipes.ingredients.Count);
    // Inputs = menu items, outputs = ingredients.
    employee = new SimpleNeuralNetwork((int)Menu.items.Count, (int)Recipes.ingredients.Count);
    employee.setActivationFunction("softmax");
    employee.randomizeWeights(0.1f);
    employee.randomizeBiases(0.1f);
    employee.setLearningRate(0.5f);
    // UI state: one button per menu item and one text cell per ingredient,
    // laid out across uiColumns columns.
    buttonColumns = new GameObject[uiColumns];
    buttons = new GameObject[(int)Menu.items.Count];
    clicked = Functions.initArray((int)Menu.items.Count, false);
    textColumns = new GameObject[uiColumns];
    ingredientText = new GameObject[(int)Recipes.ingredients.Count];
    // Spawn the network visualisation and point it at the employee network.
    neuralNetworkView = Instantiate(neuralNetworkView, Vector3.zero, Quaternion.identity);
    neuralNetworkView.GetComponent <NeuralNetworkView>().setNeuralNetwork(employee, "center");
    string[] menuItems = System.Enum.GetNames(typeof(Menu.items));
    string[] ingredients = System.Enum.GetNames(typeof(Recipes.ingredients));
    // NOTE(review): Length - 1 presumably skips a trailing "Count" enum
    // member — confirm against the Menu.items / Recipes.ingredients enums.
    for (int i = 0; i < menuItems.Length - 1; i++) {
        if (i < uiColumns) {
            // First row: create the column container, then a button inside it.
            buttonColumns[i] = Instantiate(buttonColumnPrefab, Vector3.zero, Quaternion.identity, buttonGrid.transform);
            buttons[i] = Instantiate(buttonPrefab, Vector3.zero, Quaternion.identity, buttonColumns[i].transform);
        } else {
            // Later rows: reuse the existing columns round-robin.
            buttons[i] = Instantiate(buttonPrefab, Vector3.zero, Quaternion.identity, buttonColumns[i % uiColumns].transform);
        }
        buttons[i].GetComponent <Image>().color = white;
        buttons[i].transform.Find("Text").GetComponent <Text>().text = menuItems[i];
        // Capture the index in a local so each listener clicks its own button.
        int buttonIndex = i;
        buttons[i].GetComponent <Button>().onClick.AddListener(delegate { click(buttonIndex); });
    }
    // Same round-robin layout for the read-only ingredient labels.
    for (int i = 0; i < ingredients.Length - 1; i++) {
        if (i < uiColumns) {
            textColumns[i] = Instantiate(buttonColumnPrefab, Vector3.zero, Quaternion.identity, textGrid.transform);
            ingredientText[i] = Instantiate(textPrefab, Vector3.zero, Quaternion.identity, textColumns[i].transform);
        } else {
            ingredientText[i] = Instantiate(textPrefab, Vector3.zero, Quaternion.identity, textColumns[i % uiColumns].transform);
        }
        ingredientText[i].GetComponent <Image>().color = white;
        ingredientText[i].transform.Find("Text").GetComponent <Text>().text = ingredients[i];
    }
    updateButtonColors();
}