/// <summary>
/// Builds a temporal training set from parallel input/output rows. Row i of
/// <paramref name="inputData"/> and <paramref name="outputData"/> becomes one temporal
/// point whose data is the input columns followed by the output columns.
/// </summary>
/// <param name="inputData">Input rows; every row is assumed to have the same column count as row 0.</param>
/// <param name="outputData">Output (ideal) rows, parallel to <paramref name="inputData"/>.</param>
/// <param name="leadWindowSize">Lead (prediction) window size passed to the data set.</param>
/// <param name="lagWindowSize">Lag (input) window size passed to the data set.</param>
/// <returns>The generated <see cref="TemporalMLDataSet"/>.</returns>
/// <exception cref="ArgumentNullException">When either array is null.</exception>
/// <exception cref="ArgumentException">When either array is empty or the row counts differ.</exception>
public static IMLDataSet GenerateTrainingSet(double[][] inputData, double[][] outputData, int leadWindowSize, int lagWindowSize)
{
    // Guard clauses: the original dereferenced inputData[0] unchecked and failed
    // with an opaque IndexOutOfRangeException on empty input.
    if (inputData == null) throw new ArgumentNullException(nameof(inputData));
    if (outputData == null) throw new ArgumentNullException(nameof(outputData));
    if (inputData.Length == 0 || outputData.Length == 0)
        throw new ArgumentException("Input and output data must contain at least one row.");
    if (inputData.Length != outputData.Length)
        throw new ArgumentException("Input and output data must have the same number of rows.");

    var temporalDataSet = new TemporalMLDataSet(lagWindowSize, leadWindowSize);

    int inputCount = inputData[0].Length;
    int outputCount = outputData[0].Length;

    // One raw, input-only description per input column.
    for (int i = 0; i < inputCount; i++)
    {
        temporalDataSet.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.Raw, true, false));
    }

    // One raw, predict-only description per output column.
    for (int i = 0; i < outputCount; i++)
    {
        temporalDataSet.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.Raw, false, true));
    }

    // Each row becomes a point whose data is [inputs..., outputs...].
    for (int i = 0; i < inputData.Length; i++)
    {
        var point = temporalDataSet.CreatePoint(i);
        point.Data = inputData[i].Concat(outputData[i]).ToArray();
    }

    temporalDataSet.Generate();
    return temporalDataSet;
}
/// <summary>
/// Builds a temporal data set from a single double series, using the
/// percent-change description type for both input and prediction.
/// </summary>
/// <param name="inputserie">The series to load into the data set.</param>
/// <param name="windowsize">Size of the past (input) window.</param>
/// <param name="predictsize">Size of the future (prediction) window.</param>
/// <returns>The generated temporal data set.</returns>
public static TemporalMLDataSet GenerateTrainingWithPercentChangeOnSerie(double[] inputserie, int windowsize, int predictsize)
{
    var result = new TemporalMLDataSet(windowsize, predictsize);
    result.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.PercentChange, true, true));

    // NOTE(review): the last element is skipped (Length - 1) — presumably because
    // percent change needs a following value; confirm this is intended.
    for (var i = 0; i < inputserie.Length - 1; i++)
    {
        var point = new TemporalPoint(1) { Sequence = i };
        point.Data[0] = inputserie[i];
        result.Points.Add(point);
    }

    result.Generate();
    return result;
}
// ReSharper disable UnusedMember.Local
/// <summary>
/// Loads OHLCV columns from a CSV file, builds both a temporal data set and a
/// small pair-based data set from the percent changes, then prints how an Elman
/// network scores on each. Evaluation-only helper; waits for a key press at the end.
/// </summary>
/// <param name="fileName">Path of the CSV file to parse.</param>
private static void CreateEvaluationSet(string fileName)
// ReSharper restore UnusedMember.Local
{
    List<double> opens = QuickCSVUtils.QuickParseCSV(fileName, "Open", 1200, 1200);
    List<double> highs = QuickCSVUtils.QuickParseCSV(fileName, "High", 1200, 1200);
    List<double> lows = QuickCSVUtils.QuickParseCSV(fileName, "Low", 1200, 1200);
    List<double> closes = QuickCSVUtils.QuickParseCSV(fileName, "Close", 1200, 1200);
    // Volume is parsed by column index (5) rather than by header name.
    List<double> volumes = QuickCSVUtils.QuickParseCSV(fileName, 5, 1200, 1200);

    double[] ranges = NetworkUtility.CalculateRanges(opens.ToArray(), closes.ToArray());

    TemporalMLDataSet superTemportal = TrainerHelper.GenerateTrainingWithPercentChangeOnSerie(
        100, 1, opens.ToArray(), closes.ToArray(), highs.ToArray(), lows.ToArray(), volumes.ToArray());

    // One input/ideal pair per column, each built from that column's percent changes.
    IMLDataPair pairOpens = TrainerHelper.ProcessPairs(NetworkUtility.CalculatePercents(opens.ToArray()), NetworkUtility.CalculatePercents(opens.ToArray()), 100, 1);
    IMLDataPair pairCloses = TrainerHelper.ProcessPairs(NetworkUtility.CalculatePercents(closes.ToArray()), NetworkUtility.CalculatePercents(closes.ToArray()), 100, 1);
    IMLDataPair pairHighs = TrainerHelper.ProcessPairs(NetworkUtility.CalculatePercents(highs.ToArray()), NetworkUtility.CalculatePercents(highs.ToArray()), 100, 1);
    IMLDataPair pairVolumes = TrainerHelper.ProcessPairs(NetworkUtility.CalculatePercents(volumes.ToArray()), NetworkUtility.CalculatePercents(volumes.ToArray()), 100, 1);
    IMLDataPair pairRanges = TrainerHelper.ProcessPairs(NetworkUtility.CalculatePercents(ranges.ToArray()), NetworkUtility.CalculatePercents(ranges.ToArray()), 100, 1);

    var listData = new List<IMLDataPair> { pairOpens, pairHighs, pairCloses, pairVolumes, pairRanges };
    var minitrainning = new BasicMLDataSet(listData);

    var network = (BasicNetwork)CreateElmanNetwork(100, 1);
    double normalCorrectRate = EvaluateNetworks(network, minitrainning);
    double temporalErrorRate = EvaluateNetworks(network, superTemportal);

    Console.WriteLine(@"Percent Correct with normal Data Set:" + normalCorrectRate + @" Percent Correct with temporal Dataset:" + temporalErrorRate);
    Console.WriteLine(@"Paused , Press a key to continue to evaluation");
    Console.ReadKey();
}
/// <summary>
/// Builds the temporal training set from <c>_normalizedTrainingData</c>.
/// </summary>
/// <returns>The generated temporal data set.</returns>
public IMLDataSet GenerateTraining()
{
    var dataSet = new TemporalMLDataSet(WindowSize, 1);
    dataSet.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.Raw, true, true));

    // NOTE(review): iteration starts at WindowSize, so the first WindowSize
    // samples are never added as points — confirm this is intended.
    for (var index = WindowSize; index < _normalizedTrainingData.Length; index++)
    {
        var point = new TemporalPoint(1);
        point.Sequence = index;
        point.Data[0] = _normalizedTrainingData[index];
        dataSet.Points.Add(point);
    }

    dataSet.Generate();
    return dataSet;
}
/// <summary>
/// Builds the temporal training set from the normalized forex pair values
/// for the years in [TrainStart, TrainEnd).
/// </summary>
/// <returns>The generated temporal data set.</returns>
public IMLDataSet GenerateTraining()
{
    var trainingSet = new TemporalMLDataSet(WindowSize, 1);
    trainingSet.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.Raw, true, true));

    for (int year = TrainStart; year < TrainEnd; year++)
    {
        var point = new TemporalPoint(1);
        point.Sequence = year;
        point.Data[0] = _normalizedForexPair[year];
        trainingSet.Points.Add(point);
    }

    trainingSet.Generate();
    return trainingSet;
}
/// <summary>
/// Builds the temporal training set from the normalized sunspot values
/// for the years in [TrainStart, TrainEnd).
/// </summary>
/// <returns>The generated temporal data set.</returns>
public static IMLDataSet GenerateTraining()
{
    var sunspotSet = new TemporalMLDataSet(WindowSize, 1);
    sunspotSet.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.Raw, true, true));

    for (var year = TrainStart; year < TrainEnd; year++)
    {
        var point = new TemporalPoint(1) { Sequence = year };
        point.Data[0] = _normalizedSunspots[year];
        sunspotSet.Points.Add(point);
    }

    sunspotSet.Generate();
    return sunspotSet;
}
/// <summary>
/// Builds a temporal data set from the CSV named in the second program argument,
/// trains a support vector machine on it, reports the resulting error, and saves
/// both the training data and the trained machine to the configured directory.
/// </summary>
/// <param name="app">Example host giving access to the program arguments.</param>
public static void TrainSVMNetwork(ref IExampleInterface app)
{
    TemporalMLDataSet trainingData = CreateEval.GenerateATemporalSet(app.Args[1], 1000, 500, CONFIG.INPUT_WINDOW, CONFIG.PREDICT_WINDOW);
    SupportVectorMachine machine = createNetwork();

    // Train and report the final error.
    double error = TrainNetworks(machine, trainingData);
    Console.WriteLine(@"SVM NetWork Trained to :" + error);

    // Persist both the training data and the trained machine, then report where.
    SuperUtils.SaveTraining(CONFIG.DIRECTORY, CONFIG.SVMTRAINING_FILE, trainingData);
    SuperUtils.SaveNetwork(CONFIG.DIRECTORY, CONFIG.SVMNETWORK_FILE, machine);
    Console.WriteLine(@"Network Saved to :" + CONFIG.DIRECTORY + @" File Named :" + CONFIG.SVMNETWORK_FILE);
    Console.WriteLine(@"Training Saved to :" + CONFIG.DIRECTORY + @" File Named :" + CONFIG.SVMTRAINING_FILE);

    MakeAPause();
}
/// <summary>
/// Builds a temporal data set from one or more double series, using the
/// percent-change description type for both input and prediction. All series
/// are appended, in order, as one continuous sequence of points.
/// </summary>
/// <param name="windowsize">Size of the past (input) window.</param>
/// <param name="predictsize">Size of the future (prediction) window.</param>
/// <param name="inputserie">One or more series to load into the data set.</param>
/// <returns>The generated temporal data set, or null when no series are
/// supplied (preserved from the original contract).</returns>
public static TemporalMLDataSet GenerateTrainingWithPercentChangeOnSerie(int windowsize, int predictsize, params double[][] inputserie)
{
    // The original returned null when given no series; keep that contract.
    if (inputserie == null || inputserie.Length == 0)
    {
        return null;
    }

    TemporalMLDataSet result = new TemporalMLDataSet(windowsize, predictsize);
    result.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.PercentChange, true, true));

    // BUG FIX: the original called Generate() and returned INSIDE the first
    // foreach iteration, so only the first series was ever used despite the
    // params signature. All series are now added — with one continuous
    // sequence counter so points from later series do not collide with
    // earlier ones — before Generate() is called exactly once.
    int sequence = 0;
    foreach (double[] serie in inputserie)
    {
        foreach (double value in serie)
        {
            TemporalPoint point = new TemporalPoint(1);
            point.Sequence = sequence++;
            point.Data[0] = value;
            result.Points.Add(point);
        }
    }

    result.Generate();
    return result;
}
/// <summary>
/// Builds the temporal training set from the normalized array values
/// for the days in [TrainStart, TrainEnd).
/// </summary>
/// <returns>The generated temporal data set.</returns>
public IMLDataSet GenerateTraining()
{
    var trainingData = new TemporalMLDataSet(WindowSize, 1);
    trainingData.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.Raw, true, true));

    for (int day = TrainStart; day < TrainEnd; day++)
    {
        var point = new TemporalPoint(1);
        point.Sequence = day;
        point.Data[0] = _normalizedArray[day];
        trainingData.Points.Add(point);
    }

    trainingData.Generate();
    return trainingData;
}
/// <summary>
/// Loads OHLCV columns from a CSV file and builds a BasicMLDataSet of five
/// input/ideal pairs from the percent changes of each column (opens, highs,
/// closes, volumes, and the open/close ranges).
/// </summary>
/// <param name="fileName">Path of the CSV file to parse.</param>
/// <param name="startLine">First line of the file to read.</param>
/// <param name="HowMany">Number of lines to read.</param>
/// <param name="WindowSize">Size of the input window of each pair.</param>
/// <param name="outputsize">Size of the ideal (output) window of each pair.</param>
/// <returns>The assembled data set of five pairs.</returns>
public static BasicMLDataSet CreateEvaluationSetAndLoad(string fileName, int startLine, int HowMany, int WindowSize, int outputsize)
{
    List<double> opens = NetworkUtility.QuickParseCSV(fileName, "Open", startLine, HowMany);
    List<double> highs = NetworkUtility.QuickParseCSV(fileName, "High", startLine, HowMany);
    List<double> lows = NetworkUtility.QuickParseCSV(fileName, "Low", startLine, HowMany);
    List<double> closes = NetworkUtility.QuickParseCSV(fileName, "Close", startLine, HowMany);
    // Volume is parsed by column index (5) rather than by header name.
    List<double> volumes = NetworkUtility.QuickParseCSV(fileName, 5, startLine, HowMany);

    double[] ranges = NetworkUtility.CalculateRanges(opens.ToArray(), closes.ToArray());

    // BUG FIX: the original allocated a TemporalMLDataSet(WindowSize, outputsize)
    // and immediately overwrote it with this call; the dead allocation is removed.
    // NOTE(review): superTemportal is never used or returned, and the call
    // hard-codes 100/1 instead of WindowSize/outputsize — confirm whether this
    // call can be deleted outright.
    TemporalMLDataSet superTemportal = NetworkUtility.GenerateTrainingWithPercentChangeOnSerie(
        100, 1, opens.ToArray(), closes.ToArray(), highs.ToArray(), lows.ToArray(), volumes.ToArray());

    // One input/ideal pair per column, each built from that column's percent changes.
    IMLDataPair pairOpens = SuperUtils.ProcessPair(NetworkUtility.CalculatePercents(opens.ToArray()), NetworkUtility.CalculatePercents(opens.ToArray()), WindowSize, outputsize);
    IMLDataPair pairCloses = SuperUtils.ProcessPair(NetworkUtility.CalculatePercents(closes.ToArray()), NetworkUtility.CalculatePercents(closes.ToArray()), WindowSize, outputsize);
    IMLDataPair pairHighs = SuperUtils.ProcessPair(NetworkUtility.CalculatePercents(highs.ToArray()), NetworkUtility.CalculatePercents(highs.ToArray()), WindowSize, outputsize);
    IMLDataPair pairVolumes = SuperUtils.ProcessPair(NetworkUtility.CalculatePercents(volumes.ToArray()), NetworkUtility.CalculatePercents(volumes.ToArray()), WindowSize, outputsize);
    IMLDataPair pairRanges = SuperUtils.ProcessPair(NetworkUtility.CalculatePercents(ranges.ToArray()), NetworkUtility.CalculatePercents(ranges.ToArray()), WindowSize, outputsize);

    var listData = new List<IMLDataPair> { pairOpens, pairHighs, pairCloses, pairVolumes, pairRanges };
    return new BasicMLDataSet(listData);
}
/// <summary>
/// Builds a temporal training set covering the whole of the supplied
/// normalized series.
/// </summary>
/// <param name="normalizedData">The normalized values to load, one point each.</param>
/// <returns>The generated temporal data set.</returns>
public IMLDataSet GenerateTraining(double[] normalizedData)
{
    var dataSet = new TemporalMLDataSet(WindowSize, 1);
    dataSet.AddDescription(new TemporalDataDescription(TemporalDataDescription.Type.Raw, true, true));

    // Every sample becomes one point; sequence numbers simply follow the index.
    for (var i = 0; i < normalizedData.Length; i++)
    {
        var point = new TemporalPoint(1);
        point.Sequence = i;
        point.Data[0] = normalizedData[i];
        dataSet.Points.Add(point);
    }

    dataSet.Generate();
    return dataSet;
}