/// <summary>
/// Construct the Jacobian chain-rule calculator.
/// </summary>
/// <param name="network">The network to calculate the Jacobian for.</param>
/// <param name="indexableTraining">The training set to use (must be indexable).</param>
public JacobianChainRule(BasicNetwork network, IMLDataSet indexableTraining)
{
    // Decompiler artifacts removed: the original body was a tangle of dead
    // labels and always-true branches (goto Label_0055, "if (0 == 0)", etc.).
    // The statements below are the same assignments in actual execution order.
    this._xb12276308f0fa6d9 = indexableTraining;
    this._x87a7fc6a72741c2e = network;
    this._xabb126b401219ba2 = network.Structure.CalculateSize();
    this._x530ae94d583e0ea1 = (int) this._xb12276308f0fa6d9.Count;
    // One row per training element, one column per network weight.
    this._xbdeab667c25bbc32 = EngineArray.AllocateDouble2D(this._x530ae94d583e0ea1, this._xabb126b401219ba2);
    this._xc8a462f994253347 = new double[this._x530ae94d583e0ea1];
    // Reusable pair sized to the training set's input/ideal widths.
    var input = new BasicMLData(this._xb12276308f0fa6d9.InputSize);
    var ideal = new BasicMLData(this._xb12276308f0fa6d9.IdealSize);
    this._x61830ac74d65acc3 = new BasicMLDataPair(input, ideal);
}
/// <summary>
/// Load a comma-delimited CSV file into a BasicMLDataSet.
/// </summary>
/// <param name="fileInfo">The CSV file to read.</param>
/// <param name="inputCount">Number of leading columns that are inputs.</param>
/// <param name="outputCount">Number of columns after the inputs that are ideals.</param>
/// <param name="randomize">True to shuffle the rows of the resulting set.</param>
/// <param name="headers">True if the first line is a header row to skip.</param>
/// <returns>The loaded (optionally shuffled) data set.</returns>
public static IMLDataSet LoadCSVToDataSet(FileInfo fileInfo, int inputCount, int outputCount, bool randomize = true, bool headers = true)
{
    BasicMLDataSet result = new BasicMLDataSet();
    // Machine-formatted CSV numbers should be parsed with the invariant
    // culture rather than a specific locale ("en").
    CultureInfo csvFormat = CultureInfo.InvariantCulture;
    using (TextFieldParser parser = new TextFieldParser(fileInfo.FullName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        if (headers)
            parser.ReadFields();
        while (!parser.EndOfData)
        {
            // Processing row: inputs occupy the leading columns, ideals follow.
            string[] fields = parser.ReadFields();
            var input = new BasicMLData(inputCount);
            for (int i = 0; i < inputCount; i++)
                input[i] = double.Parse(fields[i], csvFormat);
            var ideal = new BasicMLData(outputCount);
            for (int i = 0; i < outputCount; i++)
                ideal[i] = double.Parse(fields[i + inputCount], csvFormat);
            result.Add(input, ideal);
        }
    }
    if (!randomize)
    {
        return new BasicMLDataSet(result);
    }
    // BUG FIX: seeding with DateTime.Now.Millisecond yields only 1000
    // distinct seeds, so two loads within the same millisecond shuffled
    // identically. The parameterless Random ctor uses a higher-resolution seed.
    var rand = new Random();
    return new BasicMLDataSet(result.OrderBy(r => rand.Next()).ToList());
}
/// <summary>
/// Run one complete landing simulation, steering with the pilot network,
/// and return the score achieved.
/// </summary>
/// <returns>The score of the simulated landing.</returns>
public int ScorePilot()
{
    var sim = new LanderSimulator();
    while (sim.Flying)
    {
        // Feed the normalized telemetry to the network.
        IMLData input = new BasicMLData(3);
        input[0] = _fuelStats.Normalize(sim.Fuel);
        input[1] = _altitudeStats.Normalize(sim.Altitude);
        input[2] = _velocityStats.Normalize(sim.Velocity);
        IMLData output = _network.Compute(input);
        // A positive network output fires the thruster for this turn.
        bool thrust = output[0] > 0;
        if (thrust && _track)
        {
            Console.WriteLine(@"THRUST");
        }
        sim.Turn(thrust);
        if (_track)
        {
            Console.WriteLine(sim.Telemetry());
        }
    }
    return sim.Score;
}
/// <summary>
/// Generate a random training set.
/// </summary>
/// <param name="seed">The seed value to use; the same seed value will always
/// produce the same results.</param>
/// <param name="count">How many training items to generate.</param>
/// <param name="inputCount">How many input numbers.</param>
/// <param name="idealCount">How many ideal numbers.</param>
/// <param name="min">The minimum random number.</param>
/// <param name="max">The maximum random number.</param>
/// <returns>The random training set.</returns>
public static BasicMLDataSet Generate(long seed, int count, int inputCount, int idealCount, double min, double max)
{
    var generator = new LinearCongruentialGenerator(seed);
    var dataSet = new BasicMLDataSet();
    for (int item = 0; item < count; item++)
    {
        // Fill the input vector with uniform random values in [min, max].
        var input = new BasicMLData(inputCount);
        for (int col = 0; col < inputCount; col++)
        {
            input[col] = generator.Range(min, max);
        }
        // Fill the ideal (expected output) vector the same way.
        var ideal = new BasicMLData(idealCount);
        for (int col = 0; col < idealCount; col++)
        {
            ideal[col] = generator.Range(min, max);
        }
        dataSet.Add(new BasicMLDataPair(input, ideal));
    }
    return dataSet;
}
/// <summary>
/// Load a CSV file into a memory dataset.
/// </summary>
///
/// <param name="format">The CSV format to use.</param>
/// <param name="filename">The filename to load.</param>
/// <param name="headers">True if there is a header line.</param>
/// <param name="inputSize">The input size. Input always comes first in a file.</param>
/// <param name="idealSize">The ideal size, 0 for unsupervised.</param>
/// <returns>A NeuralDataSet that holds the contents of the CSV file.</returns>
public static IMLDataSet LoadCSVTOMemory(CSVFormat format, String filename, bool headers, int inputSize, int idealSize)
{
    var dataSet = new BasicMLDataSet();
    var reader = new ReadCSV(filename, headers, format);
    while (reader.Next())
    {
        int column = 0;
        // Inputs occupy the leading columns of every row.
        var input = new BasicMLData(inputSize);
        for (int i = 0; i < inputSize; i++)
        {
            input[i] = reader.GetDouble(column++);
        }
        // The ideal values follow; a size of zero means unsupervised data
        // and leaves the ideal side of the pair null.
        BasicMLData ideal = null;
        if (idealSize > 0)
        {
            ideal = new BasicMLData(idealSize);
            for (int i = 0; i < idealSize; i++)
            {
                ideal[i] = reader.GetDouble(column++);
            }
        }
        dataSet.Add(new BasicMLDataPair(input, ideal));
    }
    return dataSet;
}
/// <summary>
/// Score a candidate move: encode the board into the network's input
/// (with the candidate square marked as our own piece) and return the
/// network's evaluation.
/// </summary>
/// <param name="board">The current board state.</param>
/// <param name="move">The candidate move to evaluate.</param>
/// <returns>The network's score for this move.</returns>
private double tryMove(int[,] board, Move move)
{
    var input = new BasicMLData(Board.SIZE * Board.SIZE);
    int cell = 0;
    for (int x = 0; x < Board.SIZE; x++)
    {
        for (int y = 0; y < Board.SIZE; y++)
        {
            // Encoding: noughts = -1, crosses = +1, empty = 0.
            double encoded = 0;
            if (board[x, y] == aXon.TicTacToe.Game.TicTacToe.NOUGHTS)
            {
                encoded = -1;
            }
            else if (board[x, y] == aXon.TicTacToe.Game.TicTacToe.CROSSES)
            {
                encoded = 1;
            }
            else if (board[x, y] == aXon.TicTacToe.Game.TicTacToe.EMPTY)
            {
                encoded = 0;
            }
            // The candidate move square is marked as a nought (-1).
            if (x == move.x && y == move.y)
            {
                encoded = -1;
            }
            input[cell++] = encoded;
        }
    }
    IMLData output = this.network.Compute(input);
    return output[0];
}
/// <summary>
/// Train a small SOM on two input patterns and verify they classify to
/// different winning neurons.
/// </summary>
public void TestSOM()
{
    // Build an unsupervised training set from the SOM inputs.
    IMLDataSet training = new BasicMLDataSet(SOMInput, null);
    // Create the SOM with preset starting weights.
    var network = new SOMNetwork(4, 2) { Weights = new Matrix(MatrixArray) };
    var train = new BasicTrainSOM(network, 0.4, training, new NeighborhoodSingle()) { ForceWinner = true };
    // Run a fixed number of training iterations.
    for (int i = 0; i <= 100; i++)
    {
        train.Iteration();
    }
    // The two training patterns must map to distinct winners.
    int first = network.Classify(new BasicMLData(SOMInput[0]));
    int second = network.Classify(new BasicMLData(SOMInput[1]));
    Assert.IsTrue(first != second);
}
/// <inheritdoc />
public double CalculateScore(IMLMethod genome)
{
    var program = (EncogProgram) genome;
    var population = (PrgPopulation) program.Population;
    // Build an input with one slot per context-defined variable and run the
    // program; the score itself is a constant — only the compute call matters.
    IMLData input = new BasicMLData(population.Context.DefinedVariables.Count);
    program.Compute(input);
    return 0;
}
/// <summary>
/// Stream the XOR pairs into a disk-backed dataset and verify the persisted
/// data still represents XOR.
/// </summary>
public void TestBufferData()
{
    File.Delete(Filename);
    var dataSet = new BufferedMLDataSet(Filename);
    // Begin a load of 2-input / 1-ideal records.
    dataSet.BeginLoad(2, 1);
    for (int row = 0; row < XOR.XORInput.Length; row++)
    {
        dataSet.Add(new BasicMLData(XOR.XORInput[row]), new BasicMLData(XOR.XORIdeal[row]));
    }
    dataSet.EndLoad();
    XOR.TestXORDataSet(dataSet);
}
/// <summary>
/// Query the phenotype network over a resolution x resolution grid sampled
/// from the test image and return the cell of the strongest activation.
/// </summary>
/// <param name="resolution">The grid resolution to sample at.</param>
/// <returns>The (x, y) cell of the maximum activation.</returns>
public IntPair Query(int resolution)
{
    // first, create the input data by sampling pixel centers across [-1, 1]
    int index = 0;
    BasicMLData inputData = new BasicMLData(resolution * resolution);
    double pixelSize = 2.0 / resolution;
    double orig = -1.0 + (pixelSize / 2.0);
    double yReal = orig;
    for (int y = 0; y < resolution; y++, yReal += pixelSize)
    {
        double xReal = orig;
        for (int x = 0; x < resolution; x++, xReal += pixelSize)
        {
            inputData.Data[index] = this.test.GetPixel(xReal, yReal);
            index++;
        }
    }
    // second, query the network
    output = ((NEATNetwork) this.phenotype).Compute(inputData);
    // finally, locate the minimum and maximum activations
    minActivation = Double.PositiveInfinity;
    maxActivation = Double.NegativeInfinity;
    int maxIndex = 0;
    for (int i = 0; i < output.Count; i++)
    {
        double d = output[i];
        if (d > maxActivation)
        {
            maxActivation = d;
            maxIndex = i;
        }
        // BUG FIX: this was "else if", which skipped the min update on any
        // iteration that also raised the max — a monotonically increasing
        // output sequence left minActivation stuck at +Infinity. Min and max
        // must be tracked independently.
        if (d < minActivation)
        {
            minActivation = d;
        }
    }
    int yy = maxIndex / resolution;
    int xx = maxIndex - (yy * resolution);
    return new IntPair(xx, yy);
}
/// <summary>
/// Sample a single-input function over [start, stop] at the given step and
/// build a supervised dataset of (x, task(x)) pairs.
/// </summary>
/// <param name="task">The function to sample.</param>
/// <param name="start">The first sample point.</param>
/// <param name="stop">The last (inclusive) sample point.</param>
/// <param name="step">The distance between sample points.</param>
/// <returns>The generated dataset.</returns>
public static IMLDataSet GenerateSingleDataRange(EncogFunction task, double start, double stop, double step)
{
    var result = new BasicMLDataSet();
    for (double x = start; x <= stop; x += step)
    {
        var input = new BasicMLData(1);
        var ideal = new BasicMLData(1);
        input[0] = x;
        ideal[0] = task(x);
        result.Add(input, ideal);
    }
    return result;
}
/// <summary>
/// Create a dataset containing <paramref name="count"/> copies of the four
/// XOR cases, with a small amount of random noise added to each input.
/// </summary>
/// <param name="count">How many copies of the four XOR cases to generate.</param>
/// <returns>The noisy XOR dataset.</returns>
public static IMLDataSet CreateNoisyXORDataSet(int count)
{
    var result = new BasicMLDataSet();
    for (int copy = 0; copy < count; copy++)
    {
        for (int caseIndex = 0; caseIndex < 4; caseIndex++)
        {
            var input = new BasicMLData(XORInput[caseIndex]);
            var ideal = new BasicMLData(XORIdeal[caseIndex]);
            var pair = new BasicMLDataPair(input, ideal);
            // Jitter both inputs by up to +/-0.1; the pair holds a reference
            // to the data, so mutating it after construction is visible.
            input[0] += RangeRandomizer.Randomize(-0.1, 0.1);
            input[1] += RangeRandomizer.Randomize(-0.1, 0.1);
            result.Add(pair);
        }
    }
    return result;
}
/// <summary>
/// Run the robot simulation to completion, steering each turn with the
/// network's strongest direction output, and return the score.
/// </summary>
/// <returns>The score of the simulated run.</returns>
public int ScorePilot()
{
    while (sim.Traveling)
    {
        var input = new BasicMLData(2);
        input[0] = sim.DistanceToDestination;
        input[1] = _hStats.Normalize(sim.Heading);
        IMLData output = _network.Compute(input);
        var dirs = new Dictionary<CommandDirection, double>
        {
            {CommandDirection.MoveForward, output[0]},
            {CommandDirection.TurnLeft, output[1]},
            {CommandDirection.TurnRight, output[2]},
            {CommandDirection.MoveInReverse, output[3]}
        };
        // BUG FIX: the old code used First(v => v.Value == 1.0) — an exact
        // floating-point equality test that throws InvalidOperationException
        // whenever no network output is exactly 1.0. Select the strongest
        // output instead.
        CommandDirection thrust = dirs.OrderByDescending(v => v.Value).First().Key;
        sim.Turn(thrust);
        lock (RobotContol.ConsoleLock)
        {
            if (_track)
            {
                sim.Telemetry();
                // The original switch had only a default case that slept;
                // slow the loop so the telemetry is readable.
                Thread.Sleep(50);
            }
        }
    }
    return (sim.Score);
}
/// <summary>
/// Classify a raw sample: project it with LDA, feed the projection to the
/// network, and return the 1-based index of the strongest output neuron.
/// </summary>
/// <param name="input">The raw input sample.</param>
/// <returns>The winning class (1-based).</returns>
public override int Classify(double[] input)
{
    double[,] sample = new double[1, input.Length];
    #region convert to LDA format
    for (int i = 0; i < input.Length; i++)
    {
        sample[0, i] = input[i];
    }
    #endregion
    double[,] projectedSample = _lda.Transform(sample);
    #region convert to NN format
    double[] projectedSample2 = new double[projectedSample.GetLength(1)];
    for (int i = 0; i < projectedSample.GetLength(1); i++)
    {
        projectedSample2[i] = projectedSample[0, i];
    }
    #endregion
    IMLData set = new Encog.ML.Data.Basic.BasicMLData(projectedSample2);
    IMLData result = method.Compute(set);
    #region find winner node
    // BUG FIX: the winner search previously started from max = -1, so when
    // every output was <= -1 no winner was selected and the method returned
    // 0. Starting from negative infinity guarantees a winner for any
    // non-empty output vector.
    int pos = -1;
    double max = double.NegativeInfinity;
    for (int i = 0; i < result.Count; i++)
    {
        if (result[i] > max)
        {
            pos = i;
            max = result[i];
        }
    }
    return (pos + 1);
    #endregion
}
/// <summary>
/// Construct the Levenberg-Marquardt trainer. Decompiler artifacts (dead
/// labels, always-true branches, goto flow) have been removed; the
/// statements below are the original assignments in execution order.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
public LevenbergMarquardtTraining(BasicNetwork network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
{
    ValidateNetwork.ValidateMethodToData(network, training);
    if (network.OutputCount != 1)
    {
        throw new TrainingError("Levenberg Marquardt requires an output layer with a single neuron.");
    }
    this.Training = training;
    this._xb12276308f0fa6d9 = this.Training;
    this._x87a7fc6a72741c2e = network;
    this._x8557b7ee760663f3 = (int) this._xb12276308f0fa6d9.Count;
    this._xe2982b936ae423cd = this._x87a7fc6a72741c2e.Structure.CalculateSize();
    // Square matrix sized by the number of network weights.
    this._x05fb16197e552de6 = new Matrix(this._xe2982b936ae423cd, this._xe2982b936ae423cd);
    this._xc410e3804222557a = this._x05fb16197e552de6.Data;
    // Initial coefficients (0.0, 1.0) and damping factor (0.1) — same
    // values and order as the readable variant of this constructor.
    this._x6ad505c7ef981b0e = 0.0;
    this._xd7d571ecee49d1e4 = 1.0;
    this._x3271cefb1a159639 = 0.1;
    // Per-weight working arrays.
    this._xdadd8f92d75a3aba = new double[this._xe2982b936ae423cd];
    this._x878c4eb3cef19a5a = new double[this._xe2982b936ae423cd];
    this._x3cb63876dda4b74a = new double[this._xe2982b936ae423cd];
    // Reusable pair sized to the training set's input/ideal widths.
    BasicMLData input = new BasicMLData(this._xb12276308f0fa6d9.InputSize);
    BasicMLData ideal = new BasicMLData(this._xb12276308f0fa6d9.IdealSize);
    this._x61830ac74d65acc3 = new BasicMLDataPair(input, ideal);
}
/// <summary>
/// Initialize the SOM color demo: build the network, the Gaussian
/// neighborhood trainer, and a set of random three-component color samples.
/// </summary>
public SOMColors()
{
    InitializeComponent();
    network = CreateNetwork();
    gaussian = new NeighborhoodRBF(RBFEnum.Gaussian, WIDTH, HEIGHT);
    train = new BasicTrainSOM(network, 0.01, null, gaussian) { ForceWinner = false };
    // Generate 15 random samples, each component uniform in [-1, 1].
    samples = new List<IMLData>();
    for (int i = 0; i < 15; i++)
    {
        IMLData sample = new BasicMLData(3);
        for (int component = 0; component < 3; component++)
        {
            sample.Data[component] = RangeRandomizer.Randomize(-1, 1);
        }
        samples.Add(sample);
    }
    train.SetAutoDecay(100, 0.8, 0.003, 30, 5);
}
/// <summary>
/// Process the file.
/// </summary>
///
/// <param name="outputFile">The output file.</param>
/// <param name="method">THe method to use.</param>
public void Process(FileInfo outputFile, IMLMethod method)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    IMLData output;
    // NOTE(review): despite the local name, this value is passed as the
    // field-count argument to ExtractFields below — confirm against
    // DetermineTotalInputFieldCount's contract.
    int outputLength = _analyst.DetermineTotalInputFieldCount();
    StreamWriter tw = PrepareOutputFile(outputFile);
    ResetStatus();
    while (csv.Next())
    {
        UpdateStatus(false);
        var row = new LoadedRow(csv, _outputColumns);
        // Extract the normalized input fields for this row.
        double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst, _analystHeaders, csv, outputLength, true);
        // Time-series processing may buffer rows; Process can return null
        // until enough depth has accumulated.
        if (_series.TotalDepth > 1)
        {
            inputArray = _series.Process(inputArray);
        }
        if (inputArray != null)
        {
            IMLData input = new BasicMLData(inputArray);
            // evaluation data
            if ((method is IMLClassification) && !(method is IMLRegression))
            {
                // classification only?
                output = new BasicMLData(1);
                output[0] = ((IMLClassification) method).Classify(input);
            }
            else
            {
                // regression
                output = ((IMLRegression) method).Compute(input);
            }
            // skip file data
            int index = _fileColumns;
            int outputIndex = 0;
            // display output: walk the normalized fields and write either the
            // class name or the denormalized regression value per output field.
            foreach (AnalystField field in _analyst.Script.Normalize.NormalizedFields)
            {
                if (_analystHeaders.Find(field.Name) != -1)
                {
                    if (field.Output)
                    {
                        if (field.Classify)
                        {
                            // classification
                            ClassItem cls = field.DetermineClass(outputIndex, output.Data);
                            outputIndex += field.ColumnsNeeded;
                            if (cls == null)
                            {
                                row.Data[index++] = "?Unknown?";
                            }
                            else
                            {
                                row.Data[index++] = cls.Name;
                            }
                        }
                        else
                        {
                            // regression
                            double n = output[outputIndex++];
                            n = field.DeNormalize(n);
                            row.Data[index++] = Format.Format(n, Precision);
                        }
                    }
                }
            }
        }
        WriteRow(tw, row);
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Predict each evaluation year twice — once from actual normalized history
/// and once "closed loop" from the network's own prior predictions — and
/// print both alongside the real values.
/// </summary>
/// <param name="network">The trained network to query.</param>
public void Predict(BasicNetwork network)
{
    Console.WriteLine(@"Year Actual Predict Closed Loop Predict Denormalized Value Real Value");
    for (int year = EvaluateStart; year < EvaluateEnd; year++)
    {
        // Prediction from the actual (normalized) history window.
        IMLData input = new BasicMLData(WindowSize);
        for (var i = 0; i < input.Count; i++)
        {
            input.Data[i] = _normalizedSunspots[(year - WindowSize) + i];
        }
        double prediction = network.Compute(input).Data[0];
        _closedLoopSunspots[year] = prediction;
        // Closed-loop prediction: the window is the network's own output.
        for (var i = 0; i < input.Count; i++)
        {
            input.Data[i] = _closedLoopSunspots[(year - WindowSize) + i];
        }
        double closedLoopPrediction = network.Compute(input).Data[0];
        // display
        Console.WriteLine((StartingYear + year)
                          + @" " + Format.FormatDouble(_normalizedSunspots[year], 5)
                          + @" " + Format.FormatDouble(prediction, 5)
                          + @" " + Format.FormatDouble(closedLoopPrediction, 5)
                          + @" Accuracy:" + Format.FormatDouble(_normalizedSunspots[year] - prediction, 5)
                          + " Denormalized:" + array.Stats.DeNormalize(prediction)
                          + " Real value:" + Sunspots[year]);
    }
}
/// <summary>
/// Compute the output for this network.
/// </summary>
///
/// <param name="input">The input.</param>
/// <param name="output">The output array the result is copied into.</param>
public void Compute(double[] input, double[] output)
{
    // Wrap the raw array, run it through the network, and copy the
    // result back out into the caller's buffer.
    IMLData result = Compute(new BasicMLData(input));
    result.CopyTo(output, 0, result.Count);
}
/// <summary>
/// Walk every financial sample past PREDICT_FROM, run the network on the
/// preceding input window, and build (currently un-printed) actual vs.
/// predicted percent-change report lines.
/// </summary>
public void display()
{
    double[] present = new double[INPUT_SIZE*2];
    double[] actualOutput = new double[OUTPUT_SIZE];
    int index = 0;
    foreach (FinancialSample sample in actual.getSamples())
    {
        if (sample.getDate().CompareTo(PREDICT_FROM) > 0)
        {
            StringBuilder str = new StringBuilder();
            str.Append(sample.getDate());
            str.Append(":Start=");
            str.Append(sample.getAmount());
            // NOTE(review): if PREDICT_FROM can fall within the first
            // INPUT_SIZE samples, index - INPUT_SIZE goes negative here —
            // confirm the caller guarantees enough leading history.
            actual.getInputData(index - INPUT_SIZE, present);
            actual.getOutputData(index - INPUT_SIZE, actualOutput);
            IMLData data = new BasicMLData(present);
            IMLData Output = network.Compute(data);
            double[] predict = Output.Data;
            str.Append(",Actual % Change=");
            str.Append(actualOutput[0].ToString("N2"));
            str.Append(",Predicted % Change= ");
            str.Append(predict[0].ToString("N2"));
            str.Append(":Difference=");
            // RMS of the prediction error over a single element.
            ErrorCalculation error = new ErrorCalculation();
            error.UpdateError(Output.Data, actualOutput, 1);
            str.Append(error.CalculateRMS().ToString("N2"));
            // Console.WriteLine(str.ToString());
        }
        index++;
    }
}
/// <summary>
/// Compute the output from the input MLData. The individual values of the
/// input will be mapped to the variables defined in the context. The order
/// is the same between the input and the defined variables. The input will
/// be mapped to the appropriate types. Enums will use their ordinal number.
/// The result will be a single number MLData.
/// </summary>
/// <param name="input">The input to the program.</param>
/// <returns>A single numer MLData.</returns>
public IMLData Compute(IMLData input)
{
    if (input.Count != InputCount)
    {
        throw new EACompileError("Invalid input count.");
    }
    // Bind each input value to the corresponding context variable.
    for (int i = 0; i < input.Count; i++)
    {
        _variables.SetVariable(i, input[i]);
    }
    ExpressionValue value = RootNode.Evaluate();
    VariableMapping expected = ResultType;
    var result = new BasicMLData(1);
    // Convert the evaluated expression to a single double, validating that
    // its runtime type matches the declared result type.
    bool converted = false;
    switch (expected.VariableType)
    {
        case EPLValueType.FloatingType:
            if (value.IsNumeric)
            {
                result.Data[0] = value.ToFloatValue();
                converted = true;
            }
            break;
        case EPLValueType.StringType:
            result.Data[0] = value.ToFloatValue();
            converted = true;
            break;
        case EPLValueType.BooleanType:
            if (value.IsBoolean)
            {
                result.Data[0] = value.ToBooleanValue() ? 1.0 : 0.0;
                converted = true;
            }
            break;
        case EPLValueType.IntType:
            if (value.IsNumeric)
            {
                result.Data[0] = value.ToIntValue();
                converted = true;
            }
            break;
        case EPLValueType.EnumType:
            if (value.IsEnum)
            {
                result.Data[0] = value.ToIntValue();
                converted = true;
            }
            break;
    }
    if (!converted)
    {
        throw new EARuntimeError("EncogProgram produced "
                                 + value.ExprType.ToString() + " but "
                                 + expected.VariableType.ToString()
                                 + " was expected.");
    }
    return result;
}
/// <summary>
/// Build the training set from the image list, downsample it, and create
/// the feed-forward network sized to the training data.
/// </summary>
private void ProcessNetwork()
{
    app.WriteLine("Downsampling images...");
    foreach (ImagePair pair in imageList)
    {
        // One-hot style ideal: +1 for this image's identity, -1 elsewhere.
        var ideal = new BasicMLData(outputCount);
        for (int i = 0; i < outputCount; i++)
        {
            ideal[i] = (i == pair.Identity) ? 1 : -1;
        }
        try
        {
            var data = new ImageMLData(new Bitmap(pair.File));
            training.Add(data, ideal);
        }
        catch (Exception e)
        {
            app.WriteLine("Error loading: " + pair.File + ": " + e.Message);
        }
    }
    String strHidden1 = GetArg("hidden1");
    String strHidden2 = GetArg("hidden2");
    if (training.Count == 0)
    {
        app.WriteLine("No images to create network for.");
        return;
    }
    training.Downsample(downsampleHeight, downsampleWidth);
    int hidden1 = int.Parse(strHidden1);
    int hidden2 = int.Parse(strHidden2);
    network = EncogUtility.SimpleFeedForward(training.InputSize, hidden1, hidden2, training.IdealSize, true);
    app.WriteLine("Created network: " + network);
}
/// <summary>
/// Read an object.
/// </summary>
public Object Read(Stream mask0)
{
    var ins0 = new EncogReadHelper(mask0);
    EncogFileSection section;
    var samples = new BasicMLDataSet();
    IDictionary<String, String> networkParams = null;
    // Enum values cannot be null in C#; defaults stand in for the original nulls.
    PNNKernelType kernel = default(PNNKernelType) /* was: null */;
    PNNOutputMode outmodel = default(PNNOutputMode) /* was: null */;
    int inputCount = 0;
    int outputCount = 0;
    double error = 0;
    double[] sigma = null;
    // Walk every section of the file, picking out the PNN subsections.
    while ((section = ins0.ReadNextSection()) != null)
    {
        if (section.SectionName.Equals("PNN")
            && section.SubSectionName.Equals("PARAMS"))
        {
            networkParams = section.ParseParams();
        }
        if (section.SectionName.Equals("PNN")
            && section.SubSectionName.Equals("NETWORK"))
        {
            // Core network configuration: sizes, kernel, output mode, error, sigma.
            IDictionary<String, String> paras = section.ParseParams();
            inputCount = EncogFileSection.ParseInt(paras, PersistConst.InputCount);
            outputCount = EncogFileSection.ParseInt(paras, PersistConst.OutputCount);
            kernel = StringToKernel(paras[PersistConst.Kernel]);
            outmodel = StringToOutputMode(paras[PropertyOutputMode]);
            error = EncogFileSection.ParseDouble(paras, PersistConst.Error);
            sigma = section.ParseDoubleArray(paras, PersistConst.Sigma);
        }
        if (section.SectionName.Equals("PNN")
            && section.SubSectionName.Equals("SAMPLES"))
        {
            // Each line is one training sample: inputs followed by one ideal value.
            foreach (String line in section.Lines)
            {
                IList<String> cols = EncogFileSection.SplitColumns(line);
                int index = 0;
                var inputData = new BasicMLData(inputCount);
                for (int i = 0; i < inputCount; i++)
                {
                    inputData[i] = CSVFormat.EgFormat.Parse(cols[index++]);
                }
                // NOTE(review): the ideal vector is allocated with inputCount
                // but only element 0 is ever written — this looks like it
                // should be sized by outputCount (or 1); confirm against the
                // writer side before changing.
                var idealData = new BasicMLData(inputCount);
                idealData[0] = CSVFormat.EgFormat.Parse(cols[index++]);
                IMLDataPair pair = new BasicMLDataPair(inputData, idealData);
                samples.Add(pair);
            }
        }
    }
    var result = new BasicPNN(kernel, outmodel, inputCount, outputCount);
    if (networkParams != null)
    {
        EngineArray.PutAll(networkParams, result.Properties);
    }
    result.Samples = samples;
    result.Error = error;
    if (sigma != null)
    {
        EngineArray.ArrayCopy(sigma, result.Sigma);
    }
    return result;
}
/// <summary>
/// Processes a double array of data of input and a second array of data for ideals
/// you must input the input and output size.
/// this typically builds a supervised IMLDatapair, which you must add to a IMLDataset.
/// </summary>
/// <param name="data">The data.</param>
/// <param name="ideal">The ideal.</param>
/// <param name="_inputWindow">The _input window.</param>
/// <param name="_predictWindow">The _predict window.</param>
/// <returns>The pair, or null if <paramref name="data"/> is empty.</returns>
public static IMLDataPair ProcessPairs(double[] data, double[] ideal, int _inputWindow, int _predictWindow)
{
    // Dead-code fix: the original looped over data.Length but returned
    // unconditionally inside the first iteration (so only index 0 was ever
    // used) and built a BasicMLDataSet that was never read. Behavior is
    // preserved exactly: null for empty data, otherwise a pair built from
    // the start of each array.
    if (data.Length == 0)
    {
        return null;
    }
    // handle input window
    var inputData = new BasicMLData(_inputWindow);
    for (int j = 0; j < _inputWindow; j++)
    {
        inputData[j] = data[j];
    }
    // handle predict window
    var idealData = new BasicMLData(_predictWindow);
    for (int j = 0; j < _predictWindow; j++)
    {
        idealData[j] = ideal[j];
    }
    return new BasicMLDataPair(inputData, idealData);
}
/// <summary>
/// Processes the specified double serie into an IMLDataset.
/// To use this method, you must provide a formated double array with the input data and the ideal data in another double array.
/// The number of points in the input window makes the input array , and the predict window will create the array used in ideal.
/// This method will use ALL the data inputs and ideals you have provided.
/// </summary>
/// <param name="datainput">The datainput.</param>
/// <param name="ideals">The ideals.</param>
/// <param name="_inputWindow">The _input window.</param>
/// <param name="_predictWindow">The _predict window.</param>
/// <returns></returns>
public static IMLDataSet ProcessDoubleSerieIntoIMLDataset(List<double> datainput, List<double> ideals, int _inputWindow, int _predictWindow)
{
    var result = new BasicMLDataSet();
    //int count = 0;
    ////lets check if there is a modulo , if so we move forward in the List of doubles in inputs.This is just a check
    ////as the data of inputs should be able to fill without having .
    //while (datainput.Count % _inputWindow !=0)
    //{
    //    count++;
    //}
    var inputData = new BasicMLData(_inputWindow);
    var idealData = new BasicMLData(_predictWindow);
    // NOTE(review): each pass of this loop overwrites the ENTIRE input
    // window with the current value, so after the loop every slot holds the
    // LAST element of datainput (and likewise for the ideal window below).
    // That contradicts the summary's claim of using all the data — a
    // sliding-window fill was probably intended. Confirm against callers
    // before changing.
    foreach (double d in datainput)
    {
        // handle input window
        for (int j = 0; j < _inputWindow; j++)
        {
            inputData[j] = d;
        }
    }
    foreach (double ideal in ideals)
    {
        // handle predict window
        for (int j = 0; j < _predictWindow; j++)
        {
            idealData[j] = ideal;
        }
    }
    var pair = new BasicMLDataPair(inputData, idealData);
    result.Add(pair);
    return result;
}
/// <summary>
/// Construct the centroid.
/// </summary>
/// <param name="o">The object to base the centroid on.</param>
public BasicMLDataCentroid(IMLData o)
{
    // Clone the data so later centroid updates cannot mutate the source.
    this.value = (BasicMLData)o.Clone();
}
/// <summary>
/// Construct the centroid.
/// </summary>
/// <param name="o"> The pair to base the centroid on.</param>
public BasicMLDataPairCentroid(BasicMLDataPair o)
{
    // Clone the pair's input so centroid updates cannot mutate the source;
    // a freshly-constructed centroid represents a single member.
    _value = (BasicMLData)o.Input.Clone();
    _size = 1;
}
/// <summary>
/// Clone this object.
/// </summary>
/// <returns>A clone of this object.</returns>
public object Clone()
{
    // A new BasicMLData built from the current backing array is the clone.
    return new BasicMLData(_data);
}
/// <summary>
/// Run each test pattern through the CPN network, print the pattern rows,
/// and report the angle decoded from the network's output on the last row.
/// </summary>
/// <param name="network">The network to test.</param>
/// <param name="pattern">The display rows for each pattern.</param>
/// <param name="input">The numeric input for each pattern.</param>
public void Test(CPNNetwork network, String[][] pattern, double[][] input)
{
    for (int i = 0; i < pattern.Length; i++)
    {
        IMLData outputData = network.Compute(new BasicMLData(input[i]));
        double angle = DetermineAngle(outputData);
        // display image; the final row also carries the decoded angle
        for (int j = 0; j < HEIGHT; j++)
        {
            if (j == HEIGHT - 1)
            {
                app.WriteLine("[" + pattern[i][j] + "] -> " + ((int) angle) + " deg");
            }
            else
            {
                app.WriteLine("[" + pattern[i][j] + "]");
            }
        }
        Console.WriteLine();
    }
}
/// <summary>
/// Process the file.
/// </summary>
///
/// <param name="outputFile">The output file.</param>
/// <param name="method">The method to use.</param>
public void Process(FileInfo outputFile, IMLRegression method)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    // The data must supply at least as many inputs as the method expects.
    if (method.InputCount > _inputCount)
    {
        throw new AnalystError("This machine learning method has "
                               + method.InputCount
                               + " inputs, however, the data has " + _inputCount
                               + " inputs.");
    }
    var input = new BasicMLData(method.InputCount);
    StreamWriter tw = AnalystPrepareOutputFile(outputFile);
    ResetStatus();
    while (csv.Next())
    {
        UpdateStatus(false);
        var row = new LoadedRow(csv, _idealCount);
        // load the input data
        for (int i = 0; i < _inputCount; i++)
        {
            input[i] = Format.Parse(row.Data[i]);
        }
        // Results are written after the input and ideal columns.
        int dataIndex = _inputCount + _idealCount;
        // compute the result
        IMLData output = method.Compute(input);
        // display the computed result
        for (int i = 0; i < _outputCount; i++)
        {
            row.Data[dataIndex++] = Format.Format(output[i], Precision);
        }
        WriteRow(tw, row);
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Predict each evaluation year twice — from actual normalized history and
/// "closed loop" from the network's own prior predictions — and print both.
/// </summary>
/// <param name="network">The trained network to query.</param>
public void Predict(BasicNetwork network)
{
    Console.WriteLine(@"Year Actual Predict Closed Loop Predict Denormalized Value Real Value");
    for (var year = EvaluateStart; year < EvaluateEnd; year++)
    {
        // Window of actual normalized history ending just before this year.
        var input = new BasicMLData(WindowSize);
        for (var i = 0; i < input.Count; i++)
        {
            input[i] = _normalizedForexPair[(year - WindowSize) + i];
        }
        var prediction = network.Compute(input)[0];
        _closedLoopForexPair[year] = prediction;
        // Closed loop: feed the network's own earlier predictions back in.
        for (var i = 0; i < input.Count; i++)
        {
            input[i] = _closedLoopForexPair[(year - WindowSize) + i];
        }
        var closedLoopPrediction = network.Compute(input)[0];
        // display
        Console.WriteLine("{0} {1} {2} {3} Accuracy:{4} Denormalized:{5} Real value:{6}",
                          (StartingYear + year),
                          Format.FormatDouble(_normalizedForexPair[year], 5),
                          Format.FormatDouble(prediction, 5),
                          Format.FormatDouble(closedLoopPrediction, 5),
                          Format.FormatDouble(_normalizedForexPair[year] - prediction, 5),
                          array.Stats.DeNormalize(prediction),
                          ForexPair[year]);
    }
}
/// <summary>
/// Compute the output from this synapse.
/// </summary>
///
/// <param name="input">The input to this synapse.</param>
/// <returns>The output from this synapse.</returns>
public virtual IMLData Compute(IMLData input)
{
    IMLData result = new BasicMLData(_outputCount);
    if (_neurons.Count == 0)
    {
        throw new NeuralNetworkError(
            "This network has not been evolved yet, it has no neurons in the NEAT synapse.");
    }
    // In snapshot mode the network is flushed once per layer of depth so the
    // signal can propagate fully; otherwise a single pass is used.
    int flushCount = 1;
    if (_snapshot)
    {
        flushCount = _networkDepth;
    }
    // iterate through the network FlushCount times
    for (int i = 0; i < flushCount; ++i)
    {
        int outputIndex = 0;
        int index = 0;
        result.Clear();
        // populate the input neurons (they are ordered first in _neurons)
        while (_neurons[index].NeuronType == NEATNeuronType.Input)
        {
            _neurons[index].Output = input[index];
            index++;
        }
        // set the bias neuron
        _neurons[index++].Output = 1;
        // Remaining neurons: sum weighted inbound outputs, scale by the
        // activation response, and apply the activation function.
        while (index < _neurons.Count)
        {
            NEATNeuron currentNeuron = _neurons[index];
            double sum = 0;
            foreach (NEATLink link in currentNeuron.InboundLinks)
            {
                double weight = link.Weight;
                double neuronOutput = link.FromNeuron.Output;
                sum += weight*neuronOutput;
            }
            var d = new double[1];
            d[0] = sum/currentNeuron.ActivationResponse;
            _activationFunction.ActivationFunction(d, 0, d.Length);
            _neurons[index].Output = d[0];
            // Output neurons are copied into the result in encounter order.
            if (currentNeuron.NeuronType == NEATNeuronType.Output)
            {
                result[outputIndex++] = currentNeuron.Output;
            }
            index++;
        }
    }
    // Apply the output activation function over the whole result vector.
    _outputActivationFunction.ActivationFunction(result.Data, 0, result.Count);
    return result;
}
/// <summary>
/// Construct the LMA object.
/// </summary>
///
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
public LevenbergMarquardtTraining(BasicNetwork network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
{
    ValidateNetwork.ValidateMethodToData(network, training);
    if (network.OutputCount != 1)
    {
        throw new TrainingError(
            "Levenberg Marquardt requires an output layer with a single neuron.");
    }
    Training = training;
    _indexableTraining = Training;
    _network = network;
    _trainingLength = (int) _indexableTraining.Count;
    _parametersLength = _network.Structure.CalculateSize();
    // The Hessian approximation is square in the number of weights.
    _hessianMatrix = new Matrix(_parametersLength, _parametersLength);
    _hessian = _hessianMatrix.Data;
    // Initial coefficients and damping factor.
    _alpha = 0.0d;
    _beta = 1.0d;
    _lambda = 0.1d;
    // Per-weight working arrays.
    _deltas = new double[_parametersLength];
    _gradient = new double[_parametersLength];
    _diagonal = new double[_parametersLength];
    // Reusable pair sized to the training set's input/ideal widths.
    _pair = new BasicMLDataPair(
        new BasicMLData(_indexableTraining.InputSize),
        new BasicMLData(_indexableTraining.IdealSize));
}
/// <summary>
/// Write an array.
/// </summary>
/// <param name="data">The data to write.</param>
/// <param name="inputCount">How much of the data is input.</param>
public void Write(double[] data, int inputCount)
{
    if (_idealCount == 0)
    {
        // Unsupervised: the whole row is input.
        _dataset.Add(new BasicMLData(data));
        return;
    }
    // Supervised: split the row into input values followed by ideal values.
    var inputData = new BasicMLData(_inputCount);
    var idealData = new BasicMLData(_idealCount);
    int index = 0;
    for (int i = 0; i < _inputCount; i++)
    {
        inputData[i] = data[index++];
    }
    for (int i = 0; i < _idealCount; i++)
    {
        idealData[i] = data[index++];
    }
    _dataset.Add(inputData, idealData);
}
/// <summary>
/// Construct the centroid.
/// </summary>
/// <param name="o">The object to base the centroid on.</param>
public BasicMLDataCentroid(IMLData o)
{
    // Clone the data so later centroid updates cannot mutate the source;
    // a freshly-constructed centroid represents a single member.
    this._value = (BasicMLData)o.Clone();
    _size = 1;
}