/// <summary>
/// Builds a training set from four randomly generated input series, creates a
/// support vector machine from it and trains the machine. Intended as a
/// benchmark / smoke-test trainer.
/// </summary>
/// <param name="inputs">Number of points per generated input series.</param>
/// <param name="predictWindow">Size of the prediction (ideal) window.</param>
public static void RandomTrainerMethod(int inputs, int predictWindow)
{
    // Four random series; the fourth doubles as the ideal series for every pair.
    double[] firstInput = MakeInputs(inputs);
    double[] secondInput = MakeInputs(inputs);
    double[] thirdInput = MakeInputs(inputs);
    double[] fourthInput = MakeInputs(inputs);

    var trainingSet = new BasicMLDataSet();
    trainingSet.Add(SuperUtilsTrainer.ProcessPairs(firstInput, fourthInput, inputs, predictWindow));
    trainingSet.Add(SuperUtilsTrainer.ProcessPairs(secondInput, fourthInput, inputs, predictWindow));
    trainingSet.Add(SuperUtilsTrainer.ProcessPairs(thirdInput, fourthInput, inputs, predictWindow));
    trainingSet.Add(SuperUtilsTrainer.ProcessPairs(fourthInput, fourthInput, inputs, predictWindow));

    SupportVectorMachine machine = Create(trainingSet, inputs);
    var train = new SVMTrain(machine, trainingSet);
    TrainSVM(train, machine);
}
/// <summary>
/// Converts a list of stock quotes into a training set. Each sample's inputs are
/// the previous day's relative changes (open/high/low/close and high-low range),
/// and the ideal value is the next day's relative range change.
/// </summary>
/// <param name="input">Chronologically ordered quotes; at least three are needed to produce a sample.</param>
/// <returns>The populated dataset (empty when input is null or too short).</returns>
public BasicMLDataSet Convert(List<StockQuote> input)
{
    var dataset = new BasicMLDataSet();
    if (input == null)
    {
        // Mirrors the original null-tolerant behaviour of 'list?.Count'.
        return dataset;
    }

    for (var i = 2; i < input.Count; ++i)
    {
        var prev = input[i - 1];
        var prev2 = input[i - 2];

        var openChange = (prev.Open - prev2.Open) / prev2.Open;
        var highChange = (prev.High - prev2.High) / prev2.High;
        var lowChange = (prev.Low - prev2.Low) / prev2.Low;
        var closeChange = (prev.Close - prev2.Close) / prev2.Close;

        // Relative change of the daily high-low range, guarded against a zero range.
        var prev2Range = prev2.High - prev2.Low;
        var avgVolatilityChange = prev2Range == 0
            ? 0.0
            : ((prev.High - prev.Low) - prev2Range) / prev2Range;

        var inputQuote = new BasicMLData(new[]
            { openChange, highChange, lowChange, closeChange, avgVolatilityChange });

        // Ideal: tomorrow's relative range change, same zero-range guard.
        var prevRange = prev.High - prev.Low;
        var expValue = prevRange == 0.0
            ? 0.0
            : ((input[i].High - input[i].Low) - prevRange) / prevRange;

        dataset.Add(inputQuote, new BasicMLData(new double[] { expValue }));
    }

    return dataset;
}
/// <summary>
/// Builds a supervised training set from the DIGITS images: each image becomes one
/// pair whose ideal vector is 1 at the digit's own index and -1 everywhere else.
/// </summary>
/// <returns>The generated training set.</returns>
public static IMLDataSet GenerateTraining()
{
    var trainingSet = new BasicMLDataSet();

    for (int digit = 0; digit < DIGITS.Length; digit++)
    {
        IMLData input = Image2data(DIGITS[digit]);

        // One-of-N ideal: +1 for the matching digit, -1 for all others.
        var ideal = new BasicMLData(DIGITS.Length);
        for (int slot = 0; slot < DIGITS.Length; slot++)
        {
            ideal[slot] = slot == digit ? 1 : -1;
        }

        trainingSet.Add(input, ideal);
    }

    return trainingSet;
}
/// <summary>
/// Hopfield network demo: trains a 4-neuron network on one bipolar pattern, then
/// presents the trained pattern and a similar pattern to show recall.
/// Removed unused locals ('hopfield' and the never-read 'set').
/// </summary>
/// <param name="app">The example interface used for output.</param>
public void Execute(IExampleInterface app)
{
    this.app = app;

    // Create the neural network.
    var network = new HopfieldNetwork(4);

    // This pattern will be trained.
    bool[] pattern1 = { true, true, false, false };
    // This pattern will be presented; it is similar to pattern1.
    bool[] pattern2 = { true, false, false, false };

    var data1 = new BiPolarMLData(pattern1);
    var data2 = new BiPolarMLData(pattern2);

    // Train the neural network with pattern1.
    app.WriteLine("Training Hopfield network with: " + FormatBoolean(data1));
    network.AddPattern(data1);

    // Present pattern1 and see it recognized.
    IMLData result = network.Compute(data1);
    app.WriteLine("Presenting pattern:" + FormatBoolean(data1)
                  + ", and got " + FormatBoolean(result));

    // Present pattern2, which is similar to pattern 1; pattern 1 should be recalled.
    result = network.Compute(data2);
    app.WriteLine("Presenting pattern:" + FormatBoolean(data2)
                  + ", and got " + FormatBoolean(result));
}
/// <summary>
/// Processes the specified double series into an IMLDataset.
/// The input window makes the input array and the predict window makes the ideal array.
/// Example: for the array 1, 2, 3, 4, 5 with an input window of 4 and a predict
/// window of 1, this produces one pair with inputs 1-4 and ideal 5.
/// </summary>
/// <param name="data">The data.</param>
/// <param name="_inputWindow">The input window size.</param>
/// <param name="_predictWindow">The predict window size.</param>
/// <returns>The dataset of sliding-window pairs.</returns>
public static IMLDataSet ProcessDoubleSerieIntoIMLDataset(double[] data, int _inputWindow, int _predictWindow)
{
    var result = new BasicMLDataSet();
    int totalWindowSize = _inputWindow + _predictWindow;
    // Last valid start index such that one full window still fits in the array.
    int stopPoint = data.Length - totalWindowSize;

    // BUG FIX: the loop previously used '<', which dropped the final window and
    // produced zero pairs for the documented 5-element example above.
    for (int i = 0; i <= stopPoint; i++)
    {
        var inputData = new BasicMLData(_inputWindow);
        var idealData = new BasicMLData(_predictWindow);

        int index = i;
        // Handle input window.
        for (int j = 0; j < _inputWindow; j++)
        {
            inputData[j] = data[index++];
        }
        // Handle predict window.
        for (int j = 0; j < _predictWindow; j++)
        {
            idealData[j] = data[index++];
        }

        result.Add(new BasicMLDataPair(inputData, idealData));
    }
    return result;
}
/// <summary>
/// Generate a random training set.
/// </summary>
/// <param name="seed">The seed value to use; the same seed value will always
/// produce the same results.</param>
/// <param name="count">How many training items to generate.</param>
/// <param name="inputCount">How many input numbers per item.</param>
/// <param name="idealCount">How many ideal numbers per item.</param>
/// <param name="min">The minimum random number.</param>
/// <param name="max">The maximum random number.</param>
/// <returns>The random training set.</returns>
public static BasicMLDataSet Generate(long seed, int count, int inputCount, int idealCount, double min, double max)
{
    var generator = new LinearCongruentialGenerator(seed);
    var trainingSet = new BasicMLDataSet();

    for (int item = 0; item < count; item++)
    {
        // Fill the input vector with uniform random values in [min, max].
        var inputVector = new BasicMLData(inputCount);
        for (int col = 0; col < inputCount; col++)
        {
            inputVector[col] = generator.Range(min, max);
        }

        // Fill the ideal vector the same way.
        var idealVector = new BasicMLData(idealCount);
        for (int col = 0; col < idealCount; col++)
        {
            idealVector[col] = generator.Range(min, max);
        }

        trainingSet.Add(new BasicMLDataPair(inputVector, idealVector));
    }

    return trainingSet;
}
/// <summary>
/// Processes the provided input and ideal series into an IMLDataset.
/// The inputs are consumed in consecutive chunks of <c>_inputWindow</c> values and
/// the ideals in consecutive chunks of <c>_predictWindow</c> values; each pair of
/// chunks becomes one training pair, so all complete windows are used.
/// BUG FIX: the previous implementation overwrote the same window slots with every
/// element of the lists, so each window ended up holding only the last value and
/// only a single pair was ever produced.
/// </summary>
/// <param name="datainput">The input series.</param>
/// <param name="ideals">The ideal series.</param>
/// <param name="_inputWindow">The input window size.</param>
/// <param name="_predictWindow">The predict window size.</param>
/// <returns>The dataset of chunked pairs.</returns>
public static IMLDataSet ProcessDoubleSerieIntoIMLDataset(List<double> datainput, List<double> ideals, int _inputWindow, int _predictWindow)
{
    var result = new BasicMLDataSet();

    // Number of complete pairs both series can supply (guard against zero windows).
    int byInputs = _inputWindow > 0 ? datainput.Count / _inputWindow : 0;
    int byIdeals = _predictWindow > 0 ? ideals.Count / _predictWindow : 0;
    int pairCount = byInputs < byIdeals ? byInputs : byIdeals;

    for (int p = 0; p < pairCount; p++)
    {
        var inputData = new BasicMLData(_inputWindow);
        for (int j = 0; j < _inputWindow; j++)
        {
            inputData[j] = datainput[p * _inputWindow + j];
        }

        var idealData = new BasicMLData(_predictWindow);
        for (int j = 0; j < _predictWindow; j++)
        {
            idealData[j] = ideals[p * _predictWindow + j];
        }

        result.Add(new BasicMLDataPair(inputData, idealData));
    }

    return result;
}
/// <summary>
/// Load a CSV file into a memory-based dataset.
/// </summary>
/// <param name="format">The CSV format to use.</param>
/// <param name="filename">The filename to load.</param>
/// <param name="headers">True if there is a header line.</param>
/// <param name="inputSize">The input size; input columns always come first.</param>
/// <param name="idealSize">The ideal size, 0 for unsupervised.</param>
/// <returns>A dataset holding the contents of the CSV file.</returns>
public static IMLDataSet LoadCSVTOMemory(CSVFormat format, String filename, bool headers, int inputSize, int idealSize)
{
    var dataset = new BasicMLDataSet();
    var reader = new ReadCSV(filename, headers, format);

    while (reader.Next())
    {
        int column = 0;

        // Input columns come first in each row.
        var inputRow = new BasicMLData(inputSize);
        for (int i = 0; i < inputSize; i++)
        {
            inputRow[i] = reader.GetDouble(column++);
        }

        // Ideal columns follow, when the set is supervised.
        BasicMLData idealRow = null;
        if (idealSize > 0)
        {
            idealRow = new BasicMLData(idealSize);
            for (int i = 0; i < idealSize; i++)
            {
                idealRow[i] = reader.GetDouble(column++);
            }
        }

        dataset.Add(new BasicMLDataPair(inputRow, idealRow));
    }

    return dataset;
}
/// <summary>
/// Hopfield network demo: trains a 4-neuron network on one bipolar pattern, then
/// presents the trained pattern and a similar pattern to show recall.
/// Removed unused locals ('hopfield' and the never-read 'set').
/// </summary>
/// <param name="app">The example interface used for output.</param>
public void Execute(IExampleInterface app)
{
    this.app = app;

    // Create the neural network.
    var network = new HopfieldNetwork(4);

    // This pattern will be trained.
    bool[] pattern1 = {true, true, false, false};
    // This pattern will be presented; it is similar to pattern1.
    bool[] pattern2 = {true, false, false, false};

    var data1 = new BiPolarMLData(pattern1);
    var data2 = new BiPolarMLData(pattern2);

    // Train the neural network with pattern1.
    app.WriteLine("Training Hopfield network with: " + FormatBoolean(data1));
    network.AddPattern(data1);

    // Present pattern1 and see it recognized.
    IMLData result = network.Compute(data1);
    app.WriteLine("Presenting pattern:" + FormatBoolean(data1)
                  + ", and got " + FormatBoolean(result));

    // Present pattern2, which is similar to pattern 1; pattern 1 should be recalled.
    result = network.Compute(data2);
    app.WriteLine("Presenting pattern:" + FormatBoolean(data2)
                  + ", and got " + FormatBoolean(result));
}
/// <summary>
/// Loads a comma-delimited CSV file into a memory dataset, optionally shuffling the
/// resulting pairs. Input columns come first in each row, followed by the ideals.
/// Fixes: parse with the invariant culture (machine data, not UI text) and seed the
/// shuffle RNG properly instead of with only the current millisecond (1000 seeds).
/// </summary>
/// <param name="fileInfo">The CSV file to load.</param>
/// <param name="inputCount">Number of input columns.</param>
/// <param name="outputCount">Number of ideal columns following the inputs.</param>
/// <param name="randomize">True to shuffle the pairs before returning.</param>
/// <param name="headers">True if the first line is a header row.</param>
/// <returns>The loaded (and optionally shuffled) dataset.</returns>
public static IMLDataSet LoadCSVToDataSet(FileInfo fileInfo, int inputCount, int outputCount, bool randomize = true, bool headers = true)
{
    BasicMLDataSet result = new BasicMLDataSet();
    // Machine-readable numbers: use the invariant culture, not a UI culture.
    CultureInfo csvFormat = CultureInfo.InvariantCulture;

    using (TextFieldParser parser = new TextFieldParser(fileInfo.FullName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        if (headers)
            parser.ReadFields(); // skip the header row

        while (!parser.EndOfData)
        {
            // Process one row: inputs first, then ideals.
            string[] fields = parser.ReadFields();

            var input = new BasicMLData(inputCount);
            for (int i = 0; i < inputCount; i++)
                input[i] = double.Parse(fields[i], csvFormat);

            var ideal = new BasicMLData(outputCount);
            for (int i = 0; i < outputCount; i++)
                ideal[i] = double.Parse(fields[i + inputCount], csvFormat);

            result.Add(input, ideal);
        }
    }

    if (!randomize)
        return new BasicMLDataSet(result);

    // Shuffle via a random sort key; OrderBy evaluates the key once per element.
    var rand = new Random();
    return new BasicMLDataSet(result.OrderBy(_ => rand.Next()).ToList());
}
/// <summary>
/// Process the array into a time-series dataset: every run of
/// (_inputWindow + _predictWindow) consecutive values becomes one pair.
/// </summary>
/// <param name="data">The array to process.</param>
/// <returns>A neural data set containing the time-series windows.</returns>
public IMLDataSet Process(double[] data)
{
    IMLDataSet result = new BasicMLDataSet();

    int windowSpan = _inputWindow + _predictWindow;
    int lastStart = data.Length - windowSpan;

    for (int start = 0; start < lastStart; start++)
    {
        // Input window: values [start, start + _inputWindow).
        IMLData inputWindow = new BasicMLData(_inputWindow);
        for (int offset = 0; offset < _inputWindow; offset++)
        {
            inputWindow[offset] = data[start + offset];
        }

        // Predict window: the values immediately following the input window.
        IMLData predictWindow = new BasicMLData(_predictWindow);
        for (int offset = 0; offset < _predictWindow; offset++)
        {
            predictWindow[offset] = data[start + _inputWindow + offset];
        }

        result.Add(new BasicMLDataPair(inputWindow, predictWindow));
    }

    return result;
}
/// <summary>
/// Load a CSV file into a memory dataset.
/// </summary>
/// <param name="format">The CSV format to use.</param>
/// <param name="filename">The filename to load.</param>
/// <param name="headers">True if there is a header line.</param>
/// <param name="inputSize">The input size; input columns always come first.</param>
/// <param name="idealSize">The ideal size, 0 for unsupervised.</param>
/// <returns>A dataset holding the contents of the CSV file.</returns>
public static IMLDataSet LoadCSVTOMemory(CSVFormat format, String filename, bool headers, int inputSize, int idealSize)
{
    var memorySet = new BasicMLDataSet();
    var csvReader = new ReadCSV(filename, headers, format);

    while (csvReader.Next())
    {
        int column = 0;

        // Read the input columns, which always come first.
        var inputVector = new BasicMLData(inputSize);
        for (int i = 0; i < inputSize; i++)
        {
            inputVector[i] = csvReader.GetDouble(column++);
        }

        // Read the ideal columns when supervised; leave null otherwise.
        BasicMLData idealVector = null;
        if (idealSize > 0)
        {
            idealVector = new BasicMLData(idealSize);
            for (int i = 0; i < idealSize; i++)
            {
                idealVector[i] = csvReader.GetDouble(column++);
            }
        }

        memorySet.Add(new BasicMLDataPair(inputVector, idealVector));
    }

    return memorySet;
}
/// <summary>
/// Generate a random training set.
/// </summary>
/// <param name="seed">The seed value to use; the same seed value will always
/// produce the same results.</param>
/// <param name="count">How many training items to generate.</param>
/// <param name="inputCount">How many input numbers per item.</param>
/// <param name="idealCount">How many ideal numbers per item.</param>
/// <param name="min">The minimum random number.</param>
/// <param name="max">The maximum random number.</param>
/// <returns>The random training set.</returns>
public static BasicMLDataSet Generate(long seed, int count, int inputCount, int idealCount, double min, double max)
{
    var generator = new LinearCongruentialGenerator(seed);
    var dataset = new BasicMLDataSet();

    for (int item = 0; item < count; item++)
    {
        // Random input vector (written through the backing array, as before).
        IMLData inputVector = new BasicMLData(inputCount);
        for (int col = 0; col < inputCount; col++)
        {
            inputVector.Data[col] = generator.Range(min, max);
        }

        // Random ideal vector (written through the indexer, as before).
        IMLData idealVector = new BasicMLData(idealCount);
        for (int col = 0; col < idealCount; col++)
        {
            idealVector[col] = generator.Range(min, max);
        }

        dataset.Add(new BasicMLDataPair(inputVector, idealVector));
    }

    return dataset;
}
/// <summary>
/// Called to load training data for a company. This is how the training data is
/// actually created. To prepare input data for recognition use the CreateData
/// method. The training set will be added to; this allows the network to learn
/// from multiple companies when this method is called multiple times.
/// </summary>
/// <param name="symbol">The ticker symbol.</param>
/// <param name="training">The training set to add to.</param>
/// <param name="from">Beginning date.</param>
/// <param name="to">Ending date.</param>
public void LoadCompany(String symbol, BasicMLDataSet training, DateTime from, DateTime to)
{
    IMarketLoader loader = new YahooFinanceLoader();
    var ticker = new TickerSymbol(symbol);

    // Request the market fields the feature extraction needs.
    IList<MarketDataType> dataNeeded = new List<MarketDataType>();
    dataNeeded.Add(MarketDataType.AdjustedClose);
    dataNeeded.Add(MarketDataType.Close);
    dataNeeded.Add(MarketDataType.Open);
    dataNeeded.Add(MarketDataType.High);
    dataNeeded.Add(MarketDataType.Low);
    var results = (List<LoadedMarketData>)loader.Load(ticker, dataNeeded, from, to);
    // Chronological order is required for the windowed scan below.
    results.Sort();

    // Scan each day that has a full history window behind it (PredictWindow) and
    // a full evaluation window ahead of it (EvalWindow).
    for (var index = PredictWindow; index < results.Count - EvalWindow; index++)
    {
        var data = results[index];

        // determine bull or bear position, or neither
        var bullish = false;
        var bearish = false;

        for (int search = 1; search <= EvalWindow; search++)
        {
            var data2 = results[index + search];
            var priceBase = data.GetData(MarketDataType.AdjustedClose);
            var priceCompare = data2.GetData(MarketDataType.AdjustedClose);
            var diff = priceCompare - priceBase;
            var percent = diff / priceBase;
            // Any single day in the window crossing a threshold marks the label;
            // a day can be flagged both bullish and bearish, in which case the
            // bullish branch below wins.
            if (percent > BullPercent)
            {
                bullish = true;
            }
            else if (percent < BearPercent)
            {
                bearish = true;
            }
        }

        IMLDataPair pair = null;

        if (bullish)
        {
            pair = CreateData(results, index, true);
        }
        else if (bearish)
        {
            pair = CreateData(results, index, false);
        }

        // Neutral days (neither threshold crossed) produce no training pair.
        if (pair != null)
        {
            training.Add(pair);
        }
    }
}
/// <summary>
/// Builds a dataset that predicts the next period's value of series 1 (the high)
/// from the current period's values of series 0-4.
/// </summary>
/// <param name="matrix">Column-aligned series; each inner array is one series.</param>
/// <returns>The populated dataset.</returns>
public BasicMLDataSet ConvertToHighPred(List<double[]> matrix)
{
    var dataset = new BasicMLDataSet();

    // The last column has no "next" value, so stop one short.
    int sampleCount = matrix[0].Length - 1;
    for (var col = 0; col < sampleCount; ++col)
    {
        var features = new[]
        {
            matrix[0][col],
            matrix[1][col],
            matrix[2][col],
            matrix[3][col],
            matrix[4][col]
        };
        var nextHigh = new[] { matrix[1][col + 1] };

        dataset.Add(new BasicMLData(features), new BasicMLData(nextHigh));
    }

    return dataset;
}
/// <summary>
/// Creates a training set from four random input series; the fourth series also
/// serves as the ideal for every pair.
/// </summary>
/// <param name="inputs">Number of points per generated series.</param>
/// <param name="predictWindow">Size of the prediction (ideal) window.</param>
/// <returns>The assembled training set.</returns>
private static BasicMLDataSet MakeAsets(int inputs, int predictWindow)
{
    // Generate in the same order as before so any seeded RNG behaves identically.
    double[] seriesOne = MakeInputs(inputs);
    double[] seriesTwo = MakeInputs(inputs);
    double[] seriesThree = MakeInputs(inputs);
    double[] idealSeries = MakeInputs(inputs);

    double[][] sources = { seriesOne, seriesTwo, seriesThree, idealSeries };

    var superSet = new BasicMLDataSet();
    foreach (double[] source in sources)
    {
        superSet.Add(SuperUtilsTrainer.ProcessPairs(source, idealSeries, inputs, predictWindow));
    }

    return superSet;
}
/// <summary>
/// Processes a jagged data array into an IMLDataset; each inner array is converted
/// to one training pair via ProcessToPair.
/// </summary>
/// <param name="data">The data to process.</param>
/// <returns>The resulting dataset.</returns>
public IMLDataSet Process(double[][] data)
{
    IMLDataSet dataset = new BasicMLDataSet();
    for (int row = 0; row < data.Length; row++)
    {
        dataset.Add(ProcessToPair(data[row]));
    }
    return dataset;
}
/// <summary>
/// Trains a freshly-built feed-forward network on a random training set and
/// returns the final training error. Useful as a benchmark / smoke test.
/// Removed unused series inp5/inp6 and the never-added pair5/pair6, plus dead
/// commented-out code.
/// </summary>
/// <param name="inputs">Number of points per generated input series.</param>
/// <param name="predictWindow">Size of the prediction (ideal) window.</param>
/// <returns>The last error rate reported by CreateEval.TrainNetworks.</returns>
public static double RandomTrainerMethod(int inputs, int predictWindow)
{
    // Four random input series; the first series doubles as the ideal for every pair.
    double[] idealSeries = MakeInputs(inputs);
    double[] secondSeries = MakeInputs(inputs);
    double[] thirdSeries = MakeInputs(inputs);
    double[] fourthSeries = MakeInputs(inputs);

    var superSet = new BasicMLDataSet();
    superSet.Add(TrainerHelper.ProcessPairs(idealSeries, idealSeries, inputs, predictWindow));
    superSet.Add(TrainerHelper.ProcessPairs(secondSeries, idealSeries, inputs, predictWindow));
    superSet.Add(TrainerHelper.ProcessPairs(thirdSeries, idealSeries, inputs, predictWindow));
    superSet.Add(TrainerHelper.ProcessPairs(fourthSeries, idealSeries, inputs, predictWindow));

    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, SuperSetInputSizeOf(superSet)));
    network.AddLayer(new BasicLayer(new ActivationTANH(), false, 20));
    // NOTE(review): a zero-neuron layer looks suspicious, but it is kept to
    // preserve the original network topology -- confirm whether it was intended.
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, 0));
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, predictWindow));
    network.Structure.FinalizeStructure();
    network.Reset();

    return CreateEval.TrainNetworks(network, superSet);
}

// One-line helper kept for readability: the first layer is sized to the set's inputs.
private static int SuperSetInputSizeOf(BasicMLDataSet superSet)
{
    return superSet.InputSize;
}
/// <summary>
/// Copies this binary dataset into a memory-backed dataset, since memory access
/// is faster than streaming.
/// </summary>
/// <returns>A memory dataset containing every pair of this set.</returns>
public IMLDataSet LoadToMemory()
{
    var memorySet = new BasicMLDataSet();
    foreach (IMLDataPair dataPair in this)
    {
        memorySet.Add(dataPair);
    }
    return memorySet;
}
/// <summary>
/// Builds a dataset from the items assigned to this cluster.
/// </summary>
/// <returns>The dataset holding every clustered data item.</returns>
public IMLDataSet CreateDataSet()
{
    var clusterSet = new BasicMLDataSet();
    foreach (IMLData item in _data)
    {
        clusterSet.Add(item);
    }
    return clusterSet;
}
/// <summary>
/// Creates a dataset from the clustered data.
/// </summary>
/// <returns>The dataset containing each item of this cluster.</returns>
public IMLDataSet CreateDataSet()
{
    var dataset = new BasicMLDataSet();
    foreach (IMLData clusteredItem in _data)
    {
        dataset.Add(clusteredItem);
    }
    return dataset;
}
/// <summary>
/// Trains a freshly-built feed-forward network on a random training set and
/// returns the final training error. Useful as a benchmark / smoke test.
/// Removed unused series inp5/inp6 and the never-added pair5/pair6, plus dead
/// commented-out code.
/// </summary>
/// <param name="inputs">Number of points per generated input series.</param>
/// <param name="predictWindow">Size of the prediction (ideal) window.</param>
/// <returns>The last error rate reported by CreateEval.TrainNetworks.</returns>
public static double RandomTrainerMethod(int inputs, int predictWindow)
{
    // Four random input series; the first series doubles as the ideal for every pair.
    double[] firstinput = MakeInputs(inputs);
    double[] secondInput = MakeInputs(inputs);
    double[] thirdInput = MakeInputs(inputs);
    double[] fourthInput = MakeInputs(inputs);

    BasicMLDataSet superSet = new BasicMLDataSet();
    superSet.Add(TrainerHelper.ProcessPairs(firstinput, firstinput, inputs, predictWindow));
    superSet.Add(TrainerHelper.ProcessPairs(secondInput, firstinput, inputs, predictWindow));
    superSet.Add(TrainerHelper.ProcessPairs(thirdInput, firstinput, inputs, predictWindow));
    superSet.Add(TrainerHelper.ProcessPairs(fourthInput, firstinput, inputs, predictWindow));

    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, superSet.InputSize));
    network.AddLayer(new BasicLayer(new ActivationTANH(), false, 20));
    // NOTE(review): a zero-neuron layer looks suspicious, but it is kept to
    // preserve the original network topology -- confirm whether it was intended.
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, 0));
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, predictWindow));
    network.Structure.FinalizeStructure();
    network.Reset();

    return CreateEval.TrainNetworks(network, superSet);
}
/// <summary>
/// Generates an observation sequence of the requested length, then starts a new
/// sequence.
/// </summary>
/// <param name="length">How many observations to emit.</param>
/// <returns>The generated sequence.</returns>
public IMLDataSet ObservationSequence(int length)
{
    var sequence = new BasicMLDataSet();
    for (int remaining = length; remaining > 0; remaining--)
    {
        sequence.Add(Observation());
    }
    NewSequence();
    return sequence;
}
/// <summary>
/// Trains the network on a single example: the given board position with the new
/// target value, using one iteration of resilient propagation.
/// </summary>
/// <param name="board">The board position to train on (encoded via ANNAdapter.Adapt192).</param>
/// <param name="v">The target value for that position.</param>
public void Train(Board board, double v)
{
    BasicMLDataSet trainingSet = new BasicMLDataSet();
    BasicMLData ideal = new BasicMLData(1);
    ideal[0] = v;
    // The 192-input encoding is used; the earlier Adapt() encoding is kept below
    // for reference.
    //trainingSet.Add(ANNAdapter.Adapt(board), ideal);
    trainingSet.Add(ANNAdapter.Adapt192(board), ideal);
    IMLTrain train = new ResilientPropagation(network, trainingSet);
    // A single training iteration per call; callers control convergence.
    train.Iteration();
}
/// <summary>
/// Makes a random dataset with the requested number of IMLDataPairs.
/// Quite useful to test networks (benchmarks).
/// </summary>
/// <param name="inputs">The number of inputs per pair.</param>
/// <param name="predictWindow">The predict window size.</param>
/// <param name="numberofPairs">The number of pairs to generate.</param>
/// <returns>The random dataset.</returns>
public static BasicMLDataSet MakeRandomIMLDataset(int inputs, int predictWindow, int numberofPairs)
{
    var randomSet = new BasicMLDataSet();
    for (int pairIndex = 0; pairIndex < numberofPairs; pairIndex++)
    {
        double[] inputSeries = MakeInputs(inputs);
        double[] idealSeries = MakeInputs(inputs);
        randomSet.Add(ProcessPairs(inputSeries, idealSeries, inputs, predictWindow));
    }
    return randomSet;
}
/// <summary>
/// Builds a small 5-1-1 network and a particle-swarm trainer scored against a
/// single-pair dataset.
/// </summary>
public PSO()
{
    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(5));
    network.AddLayer(new BasicLayer(1));
    network.AddLayer(new BasicLayer(1));
    network.Structure.FinalizeStructure();
    network.Reset();
    // NOTE(review): the ideal vector carries 5 values while the output layer has a
    // single neuron -- confirm whether the ideal was meant to be one value.
    IMLDataSet dataSet = new BasicMLDataSet();
    dataSet.Add(new BasicMLData(new double[] { 1.0, 4.0, 3.0, 4.0, 5.0 }), new BasicMLData(new double[] { 2.0, 4.0, 6.0, 8.0, 10 }));
    // 5 particles, weights randomized in [0, 10], fitness = training-set score.
    train = new NeuralPSO(network, new RangeRandomizer(0, 10), new TrainingSetScore(dataSet), 5);
}
/// <summary>
/// Builds a small 5-1-1 network and a particle-swarm trainer scored against a
/// single-pair dataset.
/// </summary>
public PSO()
{
    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(5));
    network.AddLayer(new BasicLayer(1));
    network.AddLayer(new BasicLayer(1));
    network.Structure.FinalizeStructure();
    network.Reset();
    // NOTE(review): the ideal vector carries 5 values while the output layer has a
    // single neuron -- confirm whether the ideal was meant to be one value.
    IMLDataSet dataSet = new BasicMLDataSet();
    dataSet.Add(new BasicMLData(new double[] { 1.0, 4.0, 3.0, 4.0, 5.0}) , new BasicMLData(new double[] { 2.0, 4.0, 6.0 , 8.0, 10} ));
    // 5 particles, weights randomized in [0, 10], fitness = training-set score.
    train = new NeuralPSO(network, new RangeRandomizer(0, 10), new TrainingSetScore(dataSet),5);
}
/// <summary>
/// Analyze the data. This counts the records and prepares the data to be
/// processed.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="inputFile">The input file to analyze.</param>
/// <param name="headers">True, if the input file has headers.</param>
/// <param name="format">The format of the input file.</param>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    InputFilename = inputFile;
    ExpectInputHeaders = headers;
    InputFormat = format;
    Analyzed = true;
    _analyst = theAnalyst;

    // Default the output format to the input format when not set explicitly.
    if (OutputFormat == null)
    {
        OutputFormat = InputFormat;
    }

    _data = new BasicMLDataSet();
    ResetStatus();
    int recordCount = 0;

    int outputLength = _analyst.DetermineTotalColumns();
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    ReadHeaders(csv);

    _analystHeaders = new CSVHeaders(InputHeadings);

    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(true);

        // Keep the raw row alongside the analyst-normalized fields, so the
        // original values can be written back out after clustering.
        var row = new LoadedRow(csv, 1);
        double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst, _analystHeaders, csv, outputLength, true);
        var input = new ClusterRow(inputArray, row);
        _data.Add(input);

        recordCount++;
    }
    RecordCount = recordCount;
    Count = csv.ColumnCount;

    // NOTE(review): headers are re-read before closing -- presumably to leave the
    // reader/heading state consistent for later passes; confirm.
    ReadHeaders(csv);
    csv.Close();
    ReportDone(true);
}
/// <summary>
/// Extracts the training data for the current network from the database and writes
/// it to an EGB training file. Only articles whose forecast values are complete
/// are included.
/// </summary>
/// <param name="trainFile">Destination EGB file.</param>
private static void ExtractTrainData(FileInfo trainFile)
{
    var ctx = new Db();
    // Distinct neuron counts define the input and output vector sizes.
    int inputsCount = ctx.TrainSchemas.Where(x => x.NetworkID == networkID && x.Input == true).Select(x => x.NeuronID).Distinct().Count();
    int outputsCount = ctx.TrainSchemas.Where(x => x.NetworkID == networkID && x.Input == false).Select(x => x.NeuronID).Distinct().Count();
    Console.WriteLine($"in: {inputsCount}, out: {outputsCount}");
    var dataset = new BasicMLDataSet();
    int[] articles = ctx.TrainValues.Where(x => x.NetworkID == networkID).Select(x => x.ArticleID).Distinct().OrderBy(i => i).ToArray();
    var schema = ctx.TrainSchemas.Where(x => x.NetworkID == networkID).ToArray();
    int shingles;
    int forecasts;
    foreach (int articleID in articles)
    {
        var values = ctx.TrainValues.Where(x => x.NetworkID == networkID && x.ArticleID == articleID);
        var inputsArray = new double[inputsCount];
        var idealsArray = new double[outputsCount];
        shingles = 0;
        forecasts = 0;
        foreach (var value in values)
        {
            if (schema.FirstOrDefault(x => x.NeuronID == value.NeuronID).Input)
            {
                shingles++;
                // Input neuron IDs appear to be 1-based, hence the -1 offset.
                inputsArray[value.NeuronID - 1] = value.Value;
            }
            else
            {
                forecasts++;
                // NOTE(review): the -2 offset (vs. -1 for inputs) implies output
                // neuron IDs start at inputsCount + 2 -- confirm against the schema.
                idealsArray[value.NeuronID - inputsCount - 2] = value.Value;
            }
        }
        // Keep only articles with at least one shingle and a complete forecast set.
        if (shingles > 0 && forecasts == outputsCount)
        {
            dataset.Add(new BasicMLData(inputsArray), new BasicMLData(idealsArray));
            Console.WriteLine($"Article {articleID}: {shingles} shingles, {idealsArray[outputsCount - 1] * 100:N2}");
        }
        else
        {
            Console.WriteLine($"Article {articleID} NOT complete !!!");
        }
    }
    EncogUtility.SaveEGB(trainFile, dataset);
    Console.WriteLine($"Trainset written to: {trainFile.FullName}");
}
/// <summary>
/// Builds a dataset of the four XOR patterns repeated 'count' times, with uniform
/// noise in [-0.1, 0.1] added to both inputs of every pair.
/// </summary>
/// <param name="count">How many noisy copies of the four XOR rows to generate.</param>
/// <returns>The noisy XOR dataset.</returns>
public static IMLDataSet CreateNoisyXORDataSet(int count)
{
    var noisySet = new BasicMLDataSet();
    for (int copy = 0; copy < count; copy++)
    {
        for (int row = 0; row < 4; row++)
        {
            var noisyInput = new BasicMLData(XORInput[row]);
            var ideal = new BasicMLData(XORIdeal[row]);
            var pair = new BasicMLDataPair(noisyInput, ideal);
            // Perturb the inputs after pairing; the pair references the same data.
            noisyInput[0] = noisyInput[0] + RangeRandomizer.Randomize(-0.1, 0.1);
            noisyInput[1] = noisyInput[1] + RangeRandomizer.Randomize(-0.1, 0.1);
            noisySet.Add(pair);
        }
    }
    return noisySet;
}
/// <summary>
/// Samples the given function over [start, stop] at the given step, producing a
/// one-input/one-ideal dataset where ideal = task(input).
/// </summary>
/// <param name="task">The function to sample.</param>
/// <param name="start">First sample point (inclusive).</param>
/// <param name="stop">Last sample point (inclusive, subject to step accumulation).</param>
/// <param name="step">Distance between consecutive samples.</param>
/// <returns>The sampled dataset.</returns>
public static IMLDataSet GenerateSingleDataRange(EncogFunction task, double start, double stop, double step)
{
    var samples = new BasicMLDataSet();
    for (double x = start; x <= stop; x += step)
    {
        var input = new BasicMLData(1);
        input[0] = x;
        var ideal = new BasicMLData(1);
        ideal[0] = task(x);
        samples.Add(input, ideal);
    }
    return samples;
}
/// <summary>
/// Evaluates the given function at every step across [start, stop] and collects the
/// (x, task(x)) samples into a one-input/one-ideal dataset.
/// </summary>
/// <param name="task">The function to sample.</param>
/// <param name="start">First sample point (inclusive).</param>
/// <param name="stop">Last sample point (inclusive, subject to step accumulation).</param>
/// <param name="step">Distance between consecutive samples.</param>
/// <returns>The sampled dataset.</returns>
public static IMLDataSet GenerateSingleDataRange(EncogFunction task, double start, double stop, double step)
{
    var dataset = new BasicMLDataSet();
    for (double sample = start; sample <= stop; sample += step)
    {
        var inputValue = new BasicMLData(1);
        inputValue[0] = sample;
        var idealValue = new BasicMLData(1);
        idealValue[0] = task(sample);
        dataset.Add(inputValue, idealValue);
    }
    return dataset;
}
/// <summary>
/// Produces 'count' noisy repetitions of the four XOR rows; each input component is
/// perturbed with uniform noise from [-0.1, 0.1].
/// </summary>
/// <param name="count">Number of noisy copies of the XOR table.</param>
/// <returns>The noisy XOR dataset.</returns>
public static IMLDataSet CreateNoisyXORDataSet(int count)
{
    var dataset = new BasicMLDataSet();
    for (int repetition = 0; repetition < count; repetition++)
    {
        for (int patternIndex = 0; patternIndex < 4; patternIndex++)
        {
            var inputVector = new BasicMLData(XORInput[patternIndex]);
            var idealVector = new BasicMLData(XORIdeal[patternIndex]);
            var trainingPair = new BasicMLDataPair(inputVector, idealVector);
            // The pair holds a reference, so noise added here reaches the pair too.
            inputVector[0] = inputVector[0] + RangeRandomizer.Randomize(-0.1, 0.1);
            inputVector[1] = inputVector[1] + RangeRandomizer.Randomize(-0.1, 0.1);
            dataset.Add(trainingPair);
        }
    }
    return dataset;
}
/// <summary>
/// Clusters the test data into two groups with k-means and asserts that every
/// value in a cluster falls on the same side of 10 as that cluster's first value.
/// BUG FIX: the inner loop previously reused the outer loop variable 'j', so the
/// outer loop jumped past most records after the first iteration. The unused
/// cluster counter 'i' reassignment was also removed.
/// </summary>
public void TestCluster()
{
    var set = new BasicMLDataSet();
    for (int i = 0; i < Data.Length; i++)
    {
        set.Add(new BasicMLData(Data[i]));
    }

    var kmeans = new KMeansClustering(2, set);
    kmeans.Iteration();

    foreach (IMLCluster cluster in kmeans.Clusters)
    {
        IMLDataSet ds = cluster.CreateDataSet();
        IMLDataPair pair = ds[0];
        // Reference value: all members of this cluster must match its side of 10.
        double t = pair.Input[0];

        for (int row = 0; row < ds.Count; row++)
        {
            pair = ds[row];
            for (int col = 0; col < pair.Input.Count; col++)
            {
                if (t > 10)
                {
                    Assert.IsTrue(pair.Input[col] > 10);
                }
                else
                {
                    Assert.IsTrue(pair.Input[col] < 10);
                }
            }
        }
    }
}
/// <summary>
/// Clusters the test data into two groups with k-means and asserts that every
/// value in a cluster falls on the same side of 10 as that cluster's first value.
/// BUG FIX: the inner loop previously reused the outer loop variable 'j', so the
/// outer loop jumped past most records after the first iteration. The unused
/// cluster counter 'i' reassignment was also removed.
/// </summary>
public void TestCluster()
{
    var set = new BasicMLDataSet();
    for (int i = 0; i < Data.Length; i++)
    {
        set.Add(new BasicMLData(Data[i]));
    }

    var kmeans = new KMeansClustering(2, set);
    kmeans.Iteration();

    foreach (IMLCluster cluster in kmeans.Clusters)
    {
        IMLDataSet ds = cluster.CreateDataSet();
        IMLDataPair pair = BasicMLDataPair.CreatePair(ds.InputSize, ds.IdealSize);
        ds.GetRecord(0, pair);
        // Reference value: all members of this cluster must match its side of 10.
        double t = pair.InputArray[0];

        for (int row = 0; row < ds.Count; row++)
        {
            ds.GetRecord(row, pair);
            for (int col = 0; col < pair.InputArray.Length; col++)
            {
                if (t > 10)
                {
                    Assert.IsTrue(pair.InputArray[col] > 10);
                }
                else
                {
                    Assert.IsTrue(pair.InputArray[col] < 10);
                }
            }
        }
    }
}
/// <summary>
/// Learns the output distribution for each HMM state from its clustered
/// observations. States with an empty cluster get a fresh default distribution;
/// the rest are fitted to their cluster's observations.
/// </summary>
/// <param name="hmm">The HMM whose state distributions are updated.</param>
private void LearnOpdf(HiddenMarkovModel hmm)
{
    for (int state = 0; state < hmm.StateCount; state++)
    {
        ICollection<IMLDataPair> observations = _clusters.Cluster(state);

        if (observations.Count < 1)
        {
            // No data for this state: fall back to a new default distribution.
            hmm.StateDistributions[state] = _modelHmm.CreateNewDistribution();
        }
        else
        {
            var fitSet = new BasicMLDataSet();
            foreach (IMLDataPair observation in observations)
            {
                fitSet.Add(observation);
            }
            hmm.StateDistributions[state].Fit(fitSet);
        }
    }
}
/// <summary>
/// Builds the day-trading training set from DayDataMaker, saves it as an EGB file,
/// then creates and persists a fresh 10-20-10-1 feed-forward network.
/// </summary>
public static void Run()
{
    var trainingSet = new BasicMLDataSet();

    var maker = new DayDataMaker(true);
    maker.Init();
    var dataset = maker.GetDatas();

    dataset.ForEach(data =>
    {
        // Ten features: close-change and volume-relation indicators for the ticker
        // and its average relation, over several look-back horizons.
        var features = new BasicMLData(10);
        features[0] = data.TickerCloseChangePastDay;
        features[1] = data.TickerCloseChangePast2Days;
        features[2] = data.TickerCloseChangePast4Days;
        features[3] = data.AverageRelationCloseChangePastDay;
        features[4] = data.AverageRelationCloseChangePast2Days;
        features[5] = data.AverageRelationCloseChangePast4Days;
        features[6] = data.TickerVolTodayVsLately;
        features[7] = data.TickerVolYesterdayVsLately;
        features[8] = data.AverageRelationVolTodayVsLately;
        features[9] = data.AverageRelationVolYesterdayVsLately;

        // Single ideal: the next close change for the ticker.
        var ideal = new BasicMLData(1);
        ideal[0] = data.TickerCloseChangeNext;

        trainingSet.Add(features, ideal);
    });

    EncogUtility.SaveEGB(Config.TrainingFile, trainingSet);

    var network = EncogUtility.SimpleFeedForward(10, 20, 10, 1, true);
    EncogDirectoryPersistence.SaveObject(Config.NetworkFile, network);
}
/// <summary>
/// Produces an observation sequence of the given length and then begins a new
/// sequence.
/// </summary>
/// <param name="length">Number of observations to generate.</param>
/// <returns>The observation sequence.</returns>
public IMLDataSet ObservationSequence(int length)
{
    IMLDataSet observations = new BasicMLDataSet();
    for (; length > 0; length--)
    {
        observations.Add(Observation());
    }
    NewSequence();
    return observations;
}
/// <summary>
/// Analyze the data. This counts the records and prepares the data to be
/// processed.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="inputFile">The input file to analyze.</param>
/// <param name="headers">True, if the input file has headers.</param>
/// <param name="format">The format of the input file.</param>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    InputFilename = inputFile;
    ExpectInputHeaders = headers;
    Format = format;
    Analyzed = true;
    _analyst = theAnalyst;

    _data = new BasicMLDataSet();
    ResetStatus();
    int recordCount = 0;

    int outputLength = _analyst.DetermineTotalColumns();
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    ReadHeaders(csv);

    _analystHeaders = new CSVHeaders(InputHeadings);

    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(true);

        // Normalize each row with the analyst and store it as an unsupervised
        // pair (input only, no ideal).
        double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst, _analystHeaders, csv, outputLength, true);
        IMLData input = new BasicMLData(inputArray);
        _data.Add(new BasicMLDataPair(input));

        recordCount++;
    }
    RecordCount = recordCount;
    Count = csv.ColumnCount;

    // NOTE(review): headers are re-read before closing -- presumably to leave the
    // reader/heading state consistent for later passes; confirm.
    ReadHeaders(csv);
    csv.Close();
    ReportDone(true);
}
/// <summary>
/// Process the array into a time-series data set: each pair holds
/// _inputWindow consecutive values as input and the following
/// _predictWindow values as ideal.
/// </summary>
/// <param name="data">The array to process.</param>
/// <returns>A neural data set that contains the time-series.</returns>
public IMLDataSet Process(double[] data)
{
    IMLDataSet result = new BasicMLDataSet();
    int windowSpan = _inputWindow + _predictWindow;
    // NOTE(review): the window starting at data.Length - windowSpan is never
    // emitted (loop uses "<", not "<="); this mirrors the original bound —
    // confirm it is intended before changing.
    int lastStart = data.Length - windowSpan;

    for (int start = 0; start < lastStart; start++)
    {
        IMLData input = new BasicMLData(_inputWindow);
        IMLData ideal = new BasicMLData(_predictWindow);

        int cursor = start;
        for (int j = 0; j < _inputWindow; j++)
        {
            input[j] = data[cursor++];
        }
        for (int j = 0; j < _predictWindow; j++)
        {
            ideal[j] = data[cursor++];
        }

        result.Add(new BasicMLDataPair(input, ideal));
    }

    return result;
}
/// <summary>
/// Generates a random data set of <paramref name="count"/> pairs, each with
/// <paramref name="inputCount"/> random inputs and <paramref name="idealCount"/>
/// random ideals drawn uniformly from [min, max].
/// </summary>
/// <param name="seed">Seed for the linear congruential generator.</param>
/// <param name="count">Number of pairs to generate.</param>
/// <param name="inputCount">Size of each input vector.</param>
/// <param name="idealCount">Size of each ideal vector.</param>
/// <param name="min">Lower bound of the random range.</param>
/// <param name="max">Upper bound of the random range.</param>
/// <returns>The generated data set.</returns>
/// <remarks>
/// Rewritten from decompiled goto-spaghetti: the original's guards such as
/// "((uint) count) &lt; 0" and "(((uint) idealCount) &amp; 0) != 0" are always
/// false, so the control flow reduces to the plain nested loops below.
/// RNG consumption order (inputs first, then ideals, per pair) is preserved.
/// </remarks>
public static BasicMLDataSet Generate(long seed, int count, int inputCount, int idealCount, double min, double max)
{
    var generator = new LinearCongruentialGenerator(seed);
    var set = new BasicMLDataSet();

    for (int pairIndex = 0; pairIndex < count; pairIndex++)
    {
        var input = new BasicMLData(inputCount);
        for (int j = 0; j < inputCount; j++)
        {
            input.Data[j] = generator.Range(min, max);
        }

        var ideal = new BasicMLData(idealCount);
        for (int j = 0; j < idealCount; j++)
        {
            ideal[j] = generator.Range(min, max);
        }

        set.Add(new BasicMLDataPair(input, ideal));
    }

    return set;
}
/// <summary>
/// Builds a data set from the buffered observations held by this object.
/// </summary>
/// <returns>A new data set containing every buffered observation.</returns>
public IMLDataSet CreateDataSet()
{
    IMLDataSet result = new BasicMLDataSet();
    foreach (IMLData observation in this._x4a3f0a05c02f235f)
    {
        result.Add(observation);
    }
    return result;
}
/// <summary>
/// Runs every pair of the given data set through the input/ideal vector
/// processing routines and returns the processed copy.
/// </summary>
/// <param name="dataToProcess">The data set to process.</param>
/// <returns>A new data set with processed input and ideal vectors.</returns>
/// <remarks>
/// Rewritten from decompiled goto/label code: the original manually drove the
/// enumerator with labels and disposed it in a convoluted finally block; that
/// is exactly the expansion of a foreach statement, restored here.
/// </remarks>
public BasicMLDataSet ProcessDataSet(BasicMLDataSet dataToProcess)
{
    var result = new BasicMLDataSet();
    foreach (IMLDataPair pair in dataToProcess)
    {
        result.Add(new BasicMLDataPair(
            this.ProcessInputVector(pair.Input),
            this.ProcessIdealVector(pair.Ideal)));
    }
    return result;
}
// NOTE(review): decompiled/obfuscated method — kept byte-identical. The goto
// labels, opaque guards (e.g. "((uint) num3) >= 0", always true) and the
// compiler-generated names ("<>c__DisplayClass10", "<FindMavericNsim41>b__e")
// are decompiler artifacts; this text does not compile as-is and should be
// regenerated from the original assembly before any change.
//
// What it appears to do (inferred — confirm against the original assembly):
// for each training pair it clones the pair, evaluates the model's error on
// that single-pair set (num2), scales it by a leverage-like term
// num4 = 1/N + matrix[i,i] taken from x5416132d843fbf5e, and stores
// res[i] = (num2 * num4) / (1 - num4 ) — a leave-one-out style outlier score.
// It then optionally charts the scores in a ChartWindow (title
// "Обнаружение выбросов" is Russian for "Outlier detection") and returns the
// index of the largest score.
public int xa1aa8795de6d838b() { Matrix matrix; int num; double num2; double num3; double num4; double num5; double num6; ChartWindow window2; Func<double, Tuple<double, bool>> func = null; <>c__DisplayClass10 class2; int num7; bool flag; if (((uint) num3) >= 0) { double mo; this.x20aee281977480cf(); this.x0fc00f08bd4749a0(); double[] res = new double[this.x6b73aa01aa019d3a.Count]; goto Label_02AD; } if ((((uint) num4) - ((uint) num)) >= 0) { goto Label_007B; } Label_0030: num7 = res.ToList<double>().IndexOf(res.Max()); if ((((uint) num5) | 4) == 0) { goto Label_01C5; } return num7; Label_007B: if ((((uint) num4) | 0x7fffffff) == 0) { goto Label_01FF; } ChartWindow window = window2; if (func == null) { func = new Func<double, Tuple<double, bool>>(class2, this.<FindMavericNsim41>b__e); } window.barSeries.ItemsSource = Enumerable.Select<double, Tuple<double, bool>>(res, func); window.barSeries.IsSelectionEnabled = false; window.ShowDialog(); goto Label_0030; Label_017F: num6 = res.Max(); num = 0; Label_0197: flag = num < res.Length; Label_0148: if (flag) { num++; goto Label_0197; } this.xdc3df58d08a8655f(); if ((((uint) flag) & 0) != 0) { goto Label_0258; } flag = !this.xf69244535d02f4b9; if (!flag) { window2 = new ChartWindow { chart = { Title = "Обнаружение выбросов" } }; if ((((uint) num5) & 0) != 0) { return num7; } if (((uint) num6) <= uint.MaxValue) { goto Label_007B; } goto Label_0148; } if ((((uint) num) - ((uint) num2)) >= 0) { goto Label_0030; } return num7; Label_01C5: num5 = Math.Sqrt(Enumerable.Select<double, double>(res, new Func<double, double>(class2, (IntPtr) this.<FindMavericNsim41>b__d)).Sum() / ((double) res.Length)); goto Label_017F; Label_01FF: flag = num < this.x6b73aa01aa019d3a.Count; if ((((uint) num) + ((uint) num6)) < 0) { goto Label_017F; } if (flag) { this.x993b9ddd2c3f1688(num); BasicMLDataPair inputData = this.x6b73aa01aa019d3a.Data[num].Clone() as BasicMLDataPair; BasicMLDataSet data = new BasicMLDataSet(); data.Add(inputData); num2 = 
this.x5b0926ce641e48a7.CalculateError(data); } else { if (0 == 0) { mo = res.Sum() / ((double) res.Length); goto Label_01C5; } goto Label_02AD; } Label_0258: num3 = this.x5b0926ce641e48a7.CalculateError(this.xddda66ad7e26f074); num4 = (1.0 / ((double) this.x6b73aa01aa019d3a.Count)) + matrix[num, num]; res[num] = (num2 * num4) / (1.0 - num4); num++; goto Label_01FF; Label_02AD: matrix = this.x5416132d843fbf5e(this.x6b73aa01aa019d3a); num = 0; goto Label_01FF; }
/// <summary>
/// Loads (or rebuilds) the stock database, normalizes every usable company's
/// quote history into ML datasets, and splits the companies into shuffled
/// training and test sets.
/// </summary>
/// <param name="connectionStr">EF connection string for the stock database.</param>
/// <param name="recreateDb">True to drop and rebuild the database from the input directory.</param>
/// <param name="logger">Progress/diagnostics sink.</param>
/// <param name="inputDirectory">Directory of raw quote files used when (re)creating the database.</param>
/// <param name="ommitStocksSmallerThan">Skip companies with fewer quotes than this.</param>
/// <param name="ommitDeadStocksDate">Skip companies whose newest quote date is below this value (integer-encoded date — confirm format against StockQuote.Date).</param>
/// <param name="rnProvider">Randomness source used to shuffle both sets.</param>
/// <param name="ratioTrainingSet">Fraction of companies assigned to the training set.</param>
/// <returns>The shuffled training and test sets.</returns>
static TrainTestData <BasicMLDataSet> GetBasicMlDataSet(string connectionStr, bool recreateDb, ILogger logger, DirectoryInfo inputDirectory, int ommitStocksSmallerThan, int ommitDeadStocksDate, Random rnProvider, decimal ratioTrainingSet)
{
    var context = new StockEfContext(connectionStr);
    var unitOfWork = new StockEfUnitOfWork(context);
    var stocksDeserialized = default(List <Company>);
    var watch = Stopwatch.StartNew();

    // Fast path: reuse the existing database unless a rebuild was requested.
    if (context.DbExists() && !recreateDb)
    {
        stocksDeserialized = unitOfWork.Stocks.GetAll().ToList();
        logger.LogInfo(
            $@"Found {stocksDeserialized.Count} companies in Db in {watch.ElapsedMilliseconds.AsTime()}");
        watch.Restart();
    }
    else
    {
        // Rebuild: drop, recreate, deserialize the raw files and bulk-insert.
        if (context.DbExists())
        {
            context.DropDbIfExists();
            logger.LogInfo($@"Dropped Db in {watch.ElapsedMilliseconds.AsTime()}");
            watch.Restart();
        }
        context.CreateDbIfNotExists();
        logger.LogInfo($@"Created Db in {watch.ElapsedMilliseconds.AsTime()}");
        watch.Restart();
        var directoryService = new IoService();
        var stocksRaw = directoryService.ReadDirectory(inputDirectory);
        logger.LogInfo($@"Read {stocksRaw.Count} in {watch.ElapsedMilliseconds.AsTime()} from {inputDirectory.Name}");
        watch.Restart();
        stocksDeserialized = new StocksBulkDeserializer(new StocksDeserializer(new StockQuoteCsvClassMap())).Deserialize(stocksRaw);
        logger.LogInfo($@"Deserialized {stocksDeserialized.Count} in {watch.ElapsedMilliseconds.AsTime()}");
        watch.Restart();
        var bulkInserter = new CompanyBulkInserter(connectionStr);
        bulkInserter.BulkInsert(stocksDeserialized);
        logger.LogInfo($@"Saved {stocksDeserialized.Count} to {connectionStr} in {watch.ElapsedMilliseconds.AsTime()}");
        watch.Restart();
    }

    // Normalize each company's quote history; skip series that are too short
    // or whose most recent quote predates the cutoff ("dead" stocks).
    var normalizer = new StockQuotesToNormalizedMatrix();
    var allStocksNormalized = new List <BasicMLDataSet>();
    var matrixConverter = new MatrixToMlData();
    var ommitedDueToLength = 0;
    var ommitedDueToInvalidity = 0;
    foreach (var stock in stocksDeserialized)
    {
        if (stock.Quotes.Count < ommitStocksSmallerThan)
        {
            ++ommitedDueToLength;
        }
        else if (stock.Quotes.Max(s => s.Date) < ommitDeadStocksDate)
        {
            ++ommitedDueToInvalidity;
        }
        else
        {
            allStocksNormalized.Add(matrixConverter.ConvertToHighPred(normalizer.Convert(stock.Quotes.ToList())));
        }
    }
    logger.LogInfo(
        $@"Loaded, converted and normalized {allStocksNormalized.Count} ({allStocksNormalized.Sum(s => s.Count)} samples) in {watch.ElapsedMilliseconds.AsTime()}. Ommited { stocksDeserialized.Count - allStocksNormalized.Count }.{(ommitedDueToLength > 0 || ommitedDueToInvalidity > 0 ? " Reason:" : string.Empty)}{ (ommitedDueToLength > 0 ? $" {ommitedDueToLength} too small" : string.Empty)}{(ommitedDueToLength > 0 && ommitedDueToInvalidity > 0 ? "," : string.Empty)}{ (ommitedDueToInvalidity > 0 ? $" {ommitedDueToInvalidity} invalid" : string.Empty)}");
    watch.Restart();

    // Split by company: the first ratioTrainingSet fraction goes to training,
    // the remainder to test. The index i deliberately carries over between
    // the two loops.
    var trainDataSet = new BasicMLDataSet();
    var testDataSet = new BasicMLDataSet();
    var i = 0;
    for (; i < allStocksNormalized.Count * ratioTrainingSet; ++i)
    {
        foreach (var mlDataPair in allStocksNormalized[i].Data)
        {
            trainDataSet.Add(mlDataPair);
        }
    }
    for (; i < allStocksNormalized.Count; ++i)
    {
        foreach (var mlDataPair in allStocksNormalized[i].Data)
        {
            testDataSet.Add(mlDataPair);
        }
    }
    logger.LogInfo($@"Constructed training and test datasets with {trainDataSet.Count} and {testDataSet.Count} samples in {watch.ElapsedMilliseconds.AsTime()}");
    watch.Restart();

    // Shuffle pairs within each set so batches are not grouped by company.
    trainDataSet.Data.Shuffle(rnProvider);
    logger.LogInfo($@"Finished shuffling trainDataSet ({trainDataSet.Count} samples) in {watch.ElapsedMilliseconds.AsTime()}");
    watch.Restart();
    testDataSet.Data.Shuffle(rnProvider);
    logger.LogInfo($@"Finished shuffling testDataSet ({testDataSet.Count} samples) in {watch.ElapsedMilliseconds.AsTime()}");
    watch.Restart();

    return(new TrainTestData <BasicMLDataSet> { TrainingSet = trainDataSet, TestSet = testDataSet });
}
/// <summary>
/// Learn the distribution.
/// </summary>
/// <param name="hmm">The HMM.</param>
private void LearnOpdf(HiddenMarkovModel hmm)
{
    for (int state = 0; state < hmm.StateCount; state++)
    {
        ICollection<IMLDataPair> observations = _clusters.Cluster(state);
        if (observations.Count < 1)
        {
            // Empty cluster: fall back to a fresh default distribution.
            hmm.StateDistributions[state] = _modelHmm.CreateNewDistribution();
        }
        else
        {
            // Fit the state's distribution to its clustered observations.
            var sample = new BasicMLDataSet();
            foreach (IMLDataPair pair in observations)
            {
                sample.Add(pair);
            }
            hmm.StateDistributions[state].Fit(sample);
        }
    }
}
/// <summary>
/// Makes a random dataset with the requested number of IMLDataPairs.
/// Quite useful to test networks (benchmarks).
/// </summary>
/// <param name="inputs">The inputs.</param>
/// <param name="predictWindow">The predict window.</param>
/// <param name="numberofPairs">The numberof pairs.</param>
/// <returns>The generated random dataset.</returns>
public static BasicMLDataSet MakeRandomIMLDataset(int inputs, int predictWindow, int numberofPairs)
{
    var randomSet = new BasicMLDataSet();
    for (int pairIndex = 0; pairIndex < numberofPairs; pairIndex++)
    {
        // Inputs first, then ideals, to keep the RNG consumption order.
        double[] inputValues = MakeInputs(inputs);
        double[] idealValues = MakeInputs(inputs);
        randomSet.Add(ProcessPairs(inputValues, idealValues, inputs, predictWindow));
    }
    return randomSet;
}
/// <summary>
/// Read an object.
/// </summary>
public Object Read(Stream mask0)
{
    var reader = new EncogReadHelper(mask0);
    var samples = new BasicMLDataSet();
    IDictionary <String, String> networkParams = null;
    PNNKernelType kernel = default(PNNKernelType) /* was: null */;
    PNNOutputMode outmodel = default(PNNOutputMode) /* was: null */;
    int inputCount = 0;
    int outputCount = 0;
    double error = 0;
    double[] sigma = null;

    EncogFileSection section;
    while ((section = reader.ReadNextSection()) != null)
    {
        // Only "PNN" sections are relevant to this persistor.
        if (!section.SectionName.Equals("PNN"))
        {
            continue;
        }

        if (section.SubSectionName.Equals("PARAMS"))
        {
            networkParams = section.ParseParams();
        }
        else if (section.SubSectionName.Equals("NETWORK"))
        {
            IDictionary <String, String> paras = section.ParseParams();
            inputCount = EncogFileSection.ParseInt(paras, PersistConst.InputCount);
            outputCount = EncogFileSection.ParseInt(paras, PersistConst.OutputCount);
            kernel = StringToKernel(paras[PersistConst.Kernel]);
            outmodel = StringToOutputMode(paras[PropertyOutputMode]);
            error = EncogFileSection.ParseDouble(paras, PersistConst.Error);
            sigma = section.ParseDoubleArray(paras, PersistConst.Sigma);
        }
        else if (section.SubSectionName.Equals("SAMPLES"))
        {
            foreach (String line in section.Lines)
            {
                IList <String> cols = EncogFileSection.SplitColumns(line);
                int col = 0;
                var inputData = new BasicMLData(inputCount);
                for (int i = 0; i < inputCount; i++)
                {
                    inputData[i] = CSVFormat.EgFormat.Parse(cols[col++]);
                }
                // NOTE(review): the ideal vector is sized by inputCount (not
                // outputCount) and only element 0 is filled — this mirrors the
                // original; confirm against the writer before changing.
                var idealData = new BasicMLData(inputCount);
                idealData[0] = CSVFormat.EgFormat.Parse(cols[col++]);
                samples.Add(new BasicMLDataPair(inputData, idealData));
            }
        }
    }

    var result = new BasicPNN(kernel, outmodel, inputCount, outputCount);
    if (networkParams != null)
    {
        EngineArray.PutAll(networkParams, result.Properties);
    }
    result.Samples = samples;
    result.Error = error;
    if (sigma != null)
    {
        EngineArray.ArrayCopy(sigma, result.Sigma);
    }
    return (result);
}
/// <summary>
/// Processes the specified data array in an IMLDataset.
/// You can send a [][] array directly with this method.
/// </summary>
/// <param name="data">The data.</param>
/// <returns>The resulting data set, one pair per row.</returns>
public IMLDataSet Process(double[][] data)
{
    var result = new BasicMLDataSet();
    for (int row = 0; row < data.Length; row++)
    {
        result.Add(ProcessToPair(data[row]));
    }
    return result;
}
// Entry point: trains a 100x100 SOM on thermostat samples read from a text
// file, writes an ASCII map of the learned on/off regions, persists the
// network, then drives a heater simulation from the SOM's classifications in
// an endless loop.
static void Main(string[] args)
{
    BasicMLDataSet data_training = new BasicMLDataSet();
    Random rdn = new Random();

    ////////////////////////////////////////////////////////////////////////////
    // Data simulation from file: each line holds "::"-separated columns
    // (hour, tempA, setA, tempB, setB, outputA, outputB); only tempA/setA are
    // used as inputs here.
    var neuralFile = File.ReadAllLines(@"C:\Users\bredi\Desktop\TCC\TCC\neural_1.txt");
    List <string> NeuralList = new List <string>(neuralFile);
    double[][] entradafull = new double[NeuralList.Count][];
    double[][] saidafull = new double[NeuralList.Count][];
    int i = 0;
    foreach (var item in NeuralList)
    {
        var t = item.Split(new string[] { "::" }, StringSplitOptions.None);
        double[] entrada = new double[]
        {
            //System.Convert.ToDouble(t[0]),//hour
            System.Convert.ToDouble(t[1]), //tempA
            System.Convert.ToDouble(t[2]) //setA
            //System.Convert.ToDouble(t[3]),//tempB
            //System.Convert.ToDouble(t[4])//setB
        };
        entradafull[i] = entrada;
        /*double a = System.Convert.ToDouble(t[5]);
         * if (a == 1)
         * a = 0.5f;
         * else
         * a = 0.5f;
         * double b = System.Convert.ToDouble(t[6]);
         * if (b == 1)
         * b = 0.5f;
         * else if (b == 0)
         * b = 0.5f;*/
        double[] saida = new double[]
        {
            System.Convert.ToDouble(t[5])//outputA
            //System.Convert.ToDouble(t[6])//outputB
        };
        saidafull[i] = saida;
        i++;
        // NOTE(review): ideal is null — the SOM is trained unsupervised.
        data_training.Add(new BasicMLData(entrada), null);
    }
    //IMLDataSet data_training = new BasicMLDataSet(entradafull, saidafull);//OLD VERSION WITH OUTPUT
    //////////////////////////////////////////////////////////

    // Network topology and training schedule.
    int N_entradas = 2;                   // number of inputs
    int tamanho_X = 100; //100            // map width
    int tamanho_Y = 100; //100            // map height
    int N_saidas = tamanho_X * tamanho_Y; // outputs = one per map cell
    int interacoesPlanejada = 1000;       // planned training iterations
    int vizinho_inicial = 50;//50         // initial neighborhood radius
    int vizinho_final = 1;                // final neighborhood radius
    double rate_inicial = 1;              // initial learning rate
    double rate_final = 0.1;              // final learning rate

    // SOM network creation (number of inputs, number of outputs).
    SOMNetwork network = new SOMNetwork(N_entradas, N_saidas);
    network.Reset();

    // Neighborhood function creation (2D RBF, network width, network height).
    NeighborhoodRBF gaussian = new NeighborhoodRBF(RBFEnum.MexicanHat, tamanho_X, tamanho_Y);

    // Trainer (neural network, learning rate, training set, neighborhood function).
    BasicTrainSOM train = new BasicTrainSOM(network, 0.01, null, gaussian);
    train.ForceWinner = false;
    train.SetAutoDecay(interacoesPlanejada, rate_inicial, rate_final, vizinho_inicial, vizinho_final);

    // Random-order training: pick a random sample each epoch.
    for (int decay = 0; decay < interacoesPlanejada; decay++)
    {
        var idx = int.Parse(Math.Round(rdn.NextDouble() * saidafull.Length).ToString()) - 1;
        if (idx == -1) { idx = 0; }
        var data = data_training[idx].Input;
        train.TrainPattern(data);
        train.AutoDecay();
        Console.WriteLine(string.Format("Epoch {0}, Rate: {1}, Radius: {2}, Error: {3}", decay, train.LearningRate, train.Neighborhood.Radius, train.Error));
    }
    /*for (int tx = 0; tx < interacoesPlanejada; tx++)
     * {
     * train.Iteration();
     * train.AutoDecay();
     * Console.WriteLine(string.Format("Epoch {0}, Rate: {1}, Radius: {2}, Error: {3}", i, train.LearningRate, train.Neighborhood.Radius, train.Error));
     * }*/
    //////////////////////////////////////////////////////////

    // Visual map file: one character per map cell, blank until classified.
    string[,] arrayprint = new string[tamanho_X, tamanho_Y];
    for (int x = 0; x < tamanho_X; x++)
    {
        for (int y = 0; y < tamanho_Y; y++)
        {
            arrayprint[x, y] = " ";
        }
    }
    /*for (int TempA = 15; TempA < 25; TempA++)
     * {
     * for (int SetA = 15; SetA < 25; SetA++)
     * {
     * for (int TempB = 15; TempB < 25; TempB++)
     * {
     * for (int SetB = 15; SetB < 25; SetB++)
     * {
     * BasicMLData dataentradateste = new BasicMLData(new double[] { TempA, SetA, TempB, SetB });
     * var retorno = network.Classify(dataentradateste);
     * //Console.WriteLine(retorno + " ||| SetA: " + SetA + " | TempA: " + TempA + " ||| SetB: " + 20 + " | TempB: " + 0);
     * var tuple = convertToXY(retorno, tamanho_X, tamanho_Y);
     * var array_v = arrayprint[tuple.Item1, tuple.Item2];
     * if(array_v == " ")
     * {
     * string saida = "";
     * if(TempA >= SetA)
     * saida += "a";
     * else if(TempA < SetA)
     * saida += "A";
     * else
     * saida += "#";
     *
     * if (TempB >= SetB)
     * saida += "b";
     * else if (TempB < SetB)
     * saida += "B";
     *
     * else
     * saida += "#";
     *
     * arrayprint[tuple.Item1, tuple.Item2] = saida;
     * }
     * }
     * }
     *
     * }
     *}*/

    // Sweep temperature/setpoint pairs, classify each with the SOM, and mark
    // the winning cell "0" (heat off region), "1" (heat on region) or "#"
    // (conflicting cell).
    List <int> Lista_0 = new List <int>();
    List <int> Lista_1 = new List <int>();
    for (int TempA = -49; TempA < 50; TempA++)
    {
        for (int SetA = -49; SetA < 50; SetA++)
        {
            BasicMLData dataentradateste = new BasicMLData(new double[] { Normalizacao.Norm_Temp(TempA), Normalizacao.Norm_Temp(SetA) });
            var retorno = network.Classify(dataentradateste);
            //Console.WriteLine(retorno + " ||| SetA: " + SetA + " | TempA: " + TempA + " ||| SetB: " + 20 + " | TempB: " + 0);
            var tuple = convertToXY(retorno, tamanho_X, tamanho_Y);
            var array_v = arrayprint[tuple.Item1, tuple.Item2];
            if (array_v == " ")
            {
                string saida = " ";
                if (TempA >= SetA)
                {
                    if (Lista_1.Contains(retorno)) { saida += "#"; }
                    else { Lista_0.Add(retorno); saida += "0"; }
                }
                else if (TempA < SetA)
                {
                    if (Lista_0.Contains(retorno)) { saida += "#"; }
                    else { Lista_1.Add(retorno); saida += "1"; }
                }
                else { saida += "#"; }
                arrayprint[tuple.Item1, tuple.Item2] = saida;
            }
        }
    }

    // Dump the map: one row per line, "|"-terminated.
    StringBuilder fileContents = new StringBuilder();
    for (int x = 0; x < tamanho_X; x++)
    {
        for (int y = 0; y < tamanho_Y; y++) { fileContents.Append(arrayprint[x, y]); }
        fileContents.AppendLine("|");
    }
    File.WriteAllText(@"C:\Users\bredi\Documents\mapaneural.txt", fileContents.ToString());
    //////////////////////////////////////////////////////////

    // Save the network.
    string path = Path.Combine(@"C:\Users\bredi\Desktop\TCC\TCC", "redeneural" + DateTime.Now.Ticks + ".txt");
    if (File.Exists(path)) { File.Delete(path); }
    FileStream fs = new FileStream(path, FileMode.CreateNew, FileAccess.Write);
    PersistSOM persistSOM = new PersistSOM();
    persistSOM.Save(fs, network);
    fs.Close();
    //////////////////////////////////////////////////////////

    // Tests: endless simulation loop — every 0.5s of simulated time, classify
    // thermostat D's reading and switch the heaters according to the learned
    // on/off regions. NOTE(review): never terminates by design.
    DateTime datahora_atual = DateTime.MinValue;
    do
    {
        DateTime datahora = Simulation.Memory.Get().dmDateTime.DataHora;
        var Dados_D = Simulation.Input.Termostato_D();
        var Dados_E = Simulation.Input.Termostato_E();
        if (datahora >= datahora_atual.AddSeconds(.5))
        {
            datahora_atual = datahora;
            //double hora = Normalizacao.Norm_DataHoraSeg(datahora);
            //BasicMLData dataentradateste = new BasicMLData(new double[] { hora, TempA, SetA, TempB, SetB });
            //BasicMLData dataentradateste = new BasicMLData(new double[] { TempA, SetA, TempB, SetB });
            BasicMLData dataentradateste = new BasicMLData(new double[] { Dados_D.TemperaturaNormalizado, Dados_D.SetPointNormalizado });
            var retorno = network.Winner(dataentradateste);
            if (Lista_0.Contains(retorno))
            {
                // Turn off.
                Simulation.Output.DesligarAquecedor_D();
                Simulation.Output.DesligarAquecedor_E();
                Console.WriteLine(retorno + " | OFF | ");
            }
            else if (Lista_1.Contains(retorno))
            {
                // Turn on.
                Simulation.Output.LigarAquecedor_D();
                Simulation.Output.LigarAquecedor_E();
                Console.WriteLine(retorno + " | ON | ");
            }
            else
            {
                Console.WriteLine(retorno + " | OUT | ");
            }
        }
    } while (true);
}
/// <summary>
/// Builds a four-pair dataset of random series, where every pair uses the
/// fourth random series as its ideal (the last pair compares it to itself).
/// </summary>
/// <param name="inputs">Number of input values per series.</param>
/// <param name="predictWindow">The predict window.</param>
/// <returns>The four-pair dataset.</returns>
private static BasicMLDataSet MakeAsets(int inputs, int predictWindow)
{
    // Generate the four series in the original order so any shared RNG
    // inside MakeInputs is consumed identically.
    double[] seriesA = MakeInputs(inputs);
    double[] seriesB = MakeInputs(inputs);
    double[] seriesC = MakeInputs(inputs);
    double[] idealSeries = MakeInputs(inputs);

    var set = new BasicMLDataSet();
    set.Add(SuperUtilsTrainer.ProcessPairs(seriesA, idealSeries, inputs, predictWindow));
    set.Add(SuperUtilsTrainer.ProcessPairs(seriesB, idealSeries, inputs, predictWindow));
    set.Add(SuperUtilsTrainer.ProcessPairs(seriesC, idealSeries, inputs, predictWindow));
    set.Add(SuperUtilsTrainer.ProcessPairs(idealSeries, idealSeries, inputs, predictWindow));
    return set;
}
/// <summary>
/// Builds a one-hot style training set from the digit images: for digit i the
/// ideal vector holds 1 at position i and -1 everywhere else.
/// </summary>
/// <returns>The digit training set.</returns>
public static IMLDataSet GenerateTraining()
{
    IMLDataSet training = new BasicMLDataSet();
    for (int digit = 0; digit < DIGITS.Length; digit++)
    {
        var ideal = new BasicMLData(DIGITS.Length);

        // Convert the character image into the input vector.
        IMLData input = Image2data(DIGITS[digit]);

        // 1 marks the matching digit, -1 every other one.
        for (int j = 0; j < DIGITS.Length; j++)
        {
            ideal[j] = (j == digit) ? 1 : -1;
        }

        training.Add(input, ideal);
    }
    return training;
}
/// <summary>
/// Called to load training data for a company. This is how the training data is actually created.
/// To prepare input data for recognition use the CreateData method. The training set will be
/// added to. This allows the network to learn from multiple companies if this method is called
/// multiple times.
/// </summary>
/// <param name="symbol">The ticker symbol.</param>
/// <param name="training">The training set to add to.</param>
/// <param name="from">Beginning date</param>
/// <param name="to">Ending date</param>
public void LoadCompany(String symbol, BasicMLDataSet training, DateTime from, DateTime to)
{
    IMarketLoader loader = new YahooFinanceLoader();
    var ticker = new TickerSymbol(symbol);
    IList<MarketDataType> neededData = new List<MarketDataType>
    {
        MarketDataType.AdjustedClose,
        MarketDataType.Close,
        MarketDataType.Open,
        MarketDataType.High,
        MarketDataType.Low
    };

    var quotes = (List<LoadedMarketData>)loader.Load(ticker, neededData, from, to);
    quotes.Sort();

    // Walk the sorted quotes, leaving room for the predict window behind and
    // the evaluation window ahead of each anchor day.
    for (int index = PredictWindow; index < quotes.Count - EvalWindow; index++)
    {
        LoadedMarketData anchor = quotes[index];

        // Scan the evaluation window for a bull or bear move relative to the
        // anchor day's adjusted close.
        bool bullish = false;
        bool bearish = false;
        for (int ahead = 1; ahead <= EvalWindow; ahead++)
        {
            LoadedMarketData future = quotes[index + ahead];
            double basePrice = anchor.GetData(MarketDataType.AdjustedClose);
            double comparePrice = future.GetData(MarketDataType.AdjustedClose);
            double percent = (comparePrice - basePrice) / basePrice;
            if (percent > BullPercent)
            {
                bullish = true;
            }
            else if (percent < BearPercent)
            {
                bearish = true;
            }
        }

        // Only decisive windows produce a training pair.
        IMLDataPair pair = null;
        if (bullish)
        {
            pair = CreateData(quotes, index, true);
        }
        else if (bearish)
        {
            pair = CreateData(quotes, index, false);
        }
        if (pair != null)
        {
            training.Add(pair);
        }
    }
}
/// <summary>
/// Read an object.
/// </summary>
public Object Read(Stream mask0)
{
    var input = new EncogReadHelper(mask0);
    var samples = new BasicMLDataSet();
    IDictionary<String, String> networkParams = null;
    PNNKernelType kernel = default(PNNKernelType) /* was: null */;
    PNNOutputMode outmodel = default(PNNOutputMode) /* was: null */;
    int inputCount = 0;
    int outputCount = 0;
    double error = 0;
    double[] sigma = null;

    EncogFileSection section;
    while ((section = input.ReadNextSection()) != null)
    {
        if (!section.SectionName.Equals("PNN"))
        {
            continue;
        }

        switch (section.SubSectionName)
        {
            case "PARAMS":
                networkParams = section.ParseParams();
                break;

            case "NETWORK":
            {
                IDictionary<String, String> paras = section.ParseParams();
                inputCount = EncogFileSection.ParseInt(paras, PersistConst.InputCount);
                outputCount = EncogFileSection.ParseInt(paras, PersistConst.OutputCount);
                kernel = StringToKernel(paras[PersistConst.Kernel]);
                outmodel = StringToOutputMode(paras[PropertyOutputMode]);
                error = EncogFileSection.ParseDouble(paras, PersistConst.Error);
                sigma = section.ParseDoubleArray(paras, PersistConst.Sigma);
                break;
            }

            case "SAMPLES":
                foreach (String line in section.Lines)
                {
                    IList<String> cols = EncogFileSection.SplitColumns(line);
                    int col = 0;
                    var inputData = new BasicMLData(inputCount);
                    for (int i = 0; i < inputCount; i++)
                    {
                        inputData[i] = CSVFormat.EgFormat.Parse(cols[col++]);
                    }
                    // NOTE(review): ideal vector sized by inputCount (not
                    // outputCount) with only element 0 filled — mirrors the
                    // original; confirm against the writer before changing.
                    var idealData = new BasicMLData(inputCount);
                    idealData[0] = CSVFormat.EgFormat.Parse(cols[col++]);
                    samples.Add(new BasicMLDataPair(inputData, idealData));
                }
                break;
        }
    }

    var result = new BasicPNN(kernel, outmodel, inputCount, outputCount);
    if (networkParams != null)
    {
        EngineArray.PutAll(networkParams, result.Properties);
    }
    result.Samples = samples;
    result.Error = error;
    if (sigma != null)
    {
        EngineArray.ArrayCopy(sigma, result.Sigma);
    }
    return result;
}
/// <summary>
/// Processes the specified double serie into an IMLDataset.
/// To use this method, you must provide a formated double array.
/// The number of points in the input window makes the input array , and the predict window will create the array used in ideal.
/// Example you have an array with 1, 2, 3 , 4 , 5.
/// You can use this method to make an IMLDataset 4 inputs and 1 ideal (5).
/// </summary>
/// <param name="data">The data.</param>
/// <param name="_inputWindow">The _input window.</param>
/// <param name="_predictWindow">The _predict window.</param>
/// <returns>The sliding-window dataset.</returns>
public static IMLDataSet ProcessDoubleSerieIntoIMLDataset(double[] data, int _inputWindow, int _predictWindow)
{
    var result = new BasicMLDataSet();
    int windowSpan = _inputWindow + _predictWindow;
    // NOTE(review): the window starting at data.Length - windowSpan is never
    // emitted ("<", not "<="); kept to preserve the original behavior.
    int lastStart = data.Length - windowSpan;

    for (int start = 0; start < lastStart; start++)
    {
        var input = new BasicMLData(_inputWindow);
        var ideal = new BasicMLData(_predictWindow);

        int cursor = start;
        for (int j = 0; j < _inputWindow; j++)
        {
            input[j] = data[cursor++];
        }
        for (int j = 0; j < _predictWindow; j++)
        {
            ideal[j] = data[cursor++];
        }

        result.Add(new BasicMLDataPair(input, ideal));
    }

    return result;
}
/// <summary>
/// Load the binary dataset to memory. Memory access is faster.
/// </summary>
/// <returns>A memory dataset.</returns>
public IMLDataSet LoadToMemory()
{
    var memorySet = new BasicMLDataSet();
    // Copy every pair out of the binary-backed set into the in-memory one.
    foreach (IMLDataPair pair in this)
    {
        memorySet.Add(pair);
    }
    return memorySet;
}
/// <summary>
/// Processes the specified double serie into an IMLDataset.
/// To use this method, you must provide a formated double array with the input data and the ideal data in another double array.
/// The number of points in the input window makes the input array , and the predict window will create the array used in ideal.
/// This method will use ALL the data inputs and ideals you have provided.
/// </summary>
/// <param name="datainput">The datainput.</param>
/// <param name="ideals">The ideals.</param>
/// <param name="_inputWindow">The _input window.</param>
/// <param name="_predictWindow">The _predict window.</param>
/// <returns>A dataset holding a single pair built from the two lists.</returns>
public static IMLDataSet ProcessDoubleSerieIntoIMLDataset(List<double> datainput, List<double> ideals, int _inputWindow, int _predictWindow)
{
    var result = new BasicMLDataSet();
    var inputData = new BasicMLData(_inputWindow);
    var idealData = new BasicMLData(_predictWindow);

    // BUG FIX: the original looped over every list element and overwrote the
    // entire window with it, so both vectors ended up filled with only the
    // LAST value of each list. Copy each element into its own slot instead,
    // which is what the documented contract ("use ALL the data") describes.
    for (int j = 0; j < _inputWindow && j < datainput.Count; j++)
    {
        inputData[j] = datainput[j];
    }
    for (int j = 0; j < _predictWindow && j < ideals.Count; j++)
    {
        idealData[j] = ideals[j];
    }

    result.Add(new BasicMLDataPair(inputData, idealData));
    return result;
}
/// <summary>
/// Restores (de-normalizes) every pair of the given data set, returning a new
/// set of restored vectors.
/// </summary>
/// <param name="dataToProcess">The data set to restore.</param>
/// <returns>A new data set containing the restored pairs.</returns>
/// <remarks>
/// Rewritten from decompiled goto/label code: the original drove the
/// enumerator manually inside a using block with dead guards such as
/// "if (3 == 0)"; that expansion is exactly a foreach statement, restored here.
/// </remarks>
public BasicMLDataSet RestoreDataSet(BasicMLDataSet dataToProcess)
{
    var restored = new BasicMLDataSet();
    foreach (IMLDataPair pair in dataToProcess)
    {
        restored.Add(this.RestoreDataVector(pair));
    }
    return restored;
}