/// <summary>
/// Normalizes <paramref name="data"/> into the range [lo, hi] and hands the
/// configured normalizer back through <paramref name="norm"/> so callers can
/// de-normalize results later.
/// </summary>
/// <param name="data">Raw samples to normalize.</param>
/// <param name="lo">Low bound of the normalized range.</param>
/// <param name="hi">High bound of the normalized range.</param>
/// <param name="norm">Receives the normalizer used.</param>
/// <returns>The normalized copy of <paramref name="data"/>.</returns>
public double[] NormalizeData(double[] data, double lo, double hi, out NormalizeArray norm)
{
    norm = new NormalizeArray();
    norm.NormalizedLow = lo;
    norm.NormalizedHigh = hi;
    return norm.Process(data);
}
/// <summary>
/// Produces a one-step-ahead prediction for every index of the simulated
/// series: each step feeds the previous WindowSize normalized samples into
/// the SVM and de-normalizes the single output back into data space.
/// </summary>
/// <param name="network">The trained SVM.</param>
/// <param name="norm">Normalizer whose stats match the training data.</param>
/// <param name="simulatedData">The series to predict over.</param>
/// <returns>A TimeSeries of predictions aligned to the input timestamps.</returns>
public TimeSeries Predict(SupportVectorMachine network, NormalizeArray norm, TimeSeries simulatedData)
{
    double[] raw = GenerateData(simulatedData);
    int count = simulatedData.Count;
    var predictions = new TimeSeries();

    for (int t = 0; t < count; ++t)
    {
        var window = new BasicMLData(WindowSize);
        for (int offset = 0; offset < WindowSize; offset++)
        {
            int sample = (t - WindowSize) + offset;
            // Positions before the start of the series are zero-padded.
            window[offset] = sample < 0 ? 0 : norm.Stats.Normalize(raw[sample]);
        }

        IMLData output = network.Compute(window);
        double predicted = norm.Stats.DeNormalize(output[0]);
        predictions.Add(simulatedData.TimeStamp(t), predicted, false);
    }

    return predictions;
}
/// <summary>
/// Converts stock quotes into five normalized arrays — open, high, low,
/// close, volume — each scaled into [0.0, 1.0].
/// </summary>
/// <param name="input">The raw quotes to convert.</param>
/// <returns>Open, high, low, close, volume arrays, in that order.</returns>
public List <double[]> Convert(List <StockQuote> input)
{
    var matrixConvert = new StockQuotesToMatrix();
    var matrix = matrixConvert.Convert(input);

    // Price range is derived from the high column's max (matrix[1]) and the
    // low column's min (matrix[2]) so all four price series share one scale.
    var max = matrix[1].Max();
    var min = matrix[2].Min();
    var priceNormalizer = new NormalizeArray((int)Math.Floor(min), (int)Math.Ceiling(max)) { NormalizedHigh = 1.0, NormalizedLow = 0.0 };

    // Volume (matrix[4]) gets its own scale.
    var volMin = matrix[4].Min();
    var volMax = matrix[4].Max();
    var volNormalizer = new NormalizeArray((int)Math.Floor(volMin), (int)Math.Ceiling(volMax)) { NormalizedHigh = 1.0, NormalizedLow = 0.0 };

    var openNormalized = default(double[]);
    var highNormalized = default(double[]);
    var lowNormalized = default(double[]);
    var closeNormalized = default(double[]);
    var volNormalized = default(double[]);

    // NOTE(review): priceNormalizer is shared by four concurrent lambdas. If
    // NormalizeArray.Process mutates shared instance state (e.g. its Stats),
    // this is a data race — confirm thread-safety or give each task its own
    // normalizer instance.
    Parallel.Invoke(
        () => openNormalized = priceNormalizer.Process(matrix[0]),
        () => highNormalized = priceNormalizer.Process(matrix[1]),
        () => lowNormalized = priceNormalizer.Process(matrix[2]),
        () => closeNormalized = priceNormalizer.Process(matrix[3]),
        () => volNormalized = volNormalizer.Process(matrix[4]));

    return(new List <double[]> { openNormalized, highNormalized, lowNormalized, closeNormalized, volNormalized });
}
/// <summary>
/// Normalizes an array using NormalizeArray (faster than the
/// DataNormalization pipeline).
/// </summary>
/// <param name="lo">Low bound of the normalized range.</param>
/// <param name="hi">High bound of the normalized range.</param>
/// <param name="Arrays">The samples to normalize.</param>
/// <returns>The normalized copy of <paramref name="Arrays"/>.</returns>
public static double[] NormalizeArray(double lo, double hi, double[] Arrays)
{
    var normalizer = new NormalizeArray();
    normalizer.NormalizedLow = lo;
    normalizer.NormalizedHigh = hi;
    return normalizer.Process(Arrays);
}
/// <summary>
/// Scales the raw sunspot series into [lo, hi] and stores both the
/// normalized series and a working copy for closed-loop prediction.
/// </summary>
/// <param name="lo">Low bound of the normalized range.</param>
/// <param name="hi">High bound of the normalized range.</param>
public static void NormalizeSunspots(double lo, double hi)
{
    var normalizer = new NormalizeArray();
    normalizer.NormalizedLow = lo;
    normalizer.NormalizedHigh = hi;

    _normalizedSunspots = normalizer.Process(SUNSPOTS);
    _closedLoopSunspots = EngineArray.ArrayCopy(_normalizedSunspots);
}
/// <summary>
/// Scales the forex pair series into [lo, hi]. The normalizer is kept in the
/// 'array' field so later code can de-normalize, and a copy of the normalized
/// data is stored for closed-loop use.
/// </summary>
/// <param name="lo">Low bound of the normalized range.</param>
/// <param name="hi">High bound of the normalized range.</param>
public void NormalizeForexPair(double lo, double hi)
{
    array = new NormalizeArray();
    array.NormalizedLow = lo;
    array.NormalizedHigh = hi;

    // create arrays to hold the normalized forex pair data
    _normalizedForexPair = array.Process(ForexPair);
    _closedLoopForexPair = EngineArray.ArrayCopy(_normalizedForexPair);
}
/// <summary>
/// Captures the configuration and data for the first-stage network and
/// prepares the shared normalizer scaled to [normalizeLo, normalizeHi].
/// </summary>
/// <param name="windowSize">Input window size.</param>
/// <param name="iterations">Training iteration count.</param>
/// <param name="trainingData">Series used for training.</param>
/// <param name="predictData">Series used for prediction.</param>
public Stage1NeuralNetwork(int windowSize, int iterations, double[] trainingData, double[] predictData)
{
    WindowSize = windowSize;
    this.iterations = iterations;
    this.trainingData = trainingData;
    predictionData = predictData;

    normalizeArray = new NormalizeArray();
    normalizeArray.NormalizedLow = normalizeLo;
    normalizeArray.NormalizedHigh = normalizeHi;
}
/// <summary>
/// Scales the sunspot series into [lo, hi] and stores both the normalized
/// series and a working copy for closed-loop prediction.
/// </summary>
/// <param name="lo">Low bound of the normalized range.</param>
/// <param name="hi">High bound of the normalized range.</param>
public void NormalizeSunspots(double lo, double hi)
{
    var normalizer = new NormalizeArray();
    normalizer.NormalizedLow = lo;
    normalizer.NormalizedHigh = hi;

    // create arrays to hold the normalized sunspots
    _normalizedSunspots = normalizer.Process(Sunspots);
    _closedLoopSunspots = EngineArray.ArrayCopy(_normalizedSunspots);
}
/// <summary>
/// Scales the SUNSPOTS series into [lo, hi] and stores the normalized data
/// plus a copy used during closed-loop prediction.
/// </summary>
/// <param name="lo">Low bound of the normalized range.</param>
/// <param name="hi">High bound of the normalized range.</param>
public static void normalizeSunspots(double lo, double hi)
{
    var norm = new NormalizeArray { NormalizedLow = lo, NormalizedHigh = hi };

    // create arrays to hold the normalized sunspots
    normalizedSunspots = norm.Process(SUNSPOTS);
    closedLoopSunspots = EngineArray.ArrayCopy(normalizedSunspots);
}
/// <summary>
/// Builds and trains an RBF network from the simulated series:
/// raw data -> normalized samples -> training set -> trained network.
/// </summary>
/// <param name="simulatedData">The source series.</param>
/// <param name="norm">Receives the normalizer used on the data.</param>
/// <returns>The trained network.</returns>
protected virtual RBFNetwork BuildNetwork(TimeSeries simulatedData, out NormalizeArray norm)
{
    double[] rawSamples = GenerateData(simulatedData);
    double[] scaled = NormalizeData(rawSamples, mNormalizedLow, mNormalzedHigh, out norm);

    RBFNetwork network = CreateNetwork();
    Train(network, GenerateTraining(scaled));
    return network;
}
/// <summary>
/// Verifies that a default NormalizeArray maps an input's observed range
/// onto [-1, 1] and records the actual low/high in its stats.
/// </summary>
public void TestNormalize()
{
    var normalizer = new NormalizeArray();
    double[] source = { 1, 5, 10 };

    double[] result = normalizer.Process(source);

    Assert.AreEqual(3, result.Length);
    // The input extremes land exactly on the normalized bounds.
    Assert.AreEqual(-1.0, result[0]);
    Assert.AreEqual(1.0, result[2]);
    // The stats record the observed input range.
    Assert.AreEqual(1.0, normalizer.Stats.ActualLow);
    Assert.AreEqual(10.0, normalizer.Stats.ActualHigh);
}
/// <summary>
/// Normalizes <paramref name="data"/> into [lo, hi], returning the scaled
/// array and handing the configured normalizer back for de-normalization.
/// </summary>
/// <param name="data">Raw samples to normalize.</param>
/// <param name="lo">Low bound of the normalized range.</param>
/// <param name="hi">High bound of the normalized range.</param>
/// <param name="norm">Receives the normalizer used.</param>
/// <returns>The normalized copy of <paramref name="data"/>.</returns>
public double[] NormalizeData(double[] data, double lo, double hi, out NormalizeArray norm)
{
    norm = new NormalizeArray { NormalizedLow = lo, NormalizedHigh = hi };
    return norm.Process(data);
}
/// <summary>
/// Builds a trained SVM from the simulated series: normalizes the data into
/// [0.1, 0.9], searches for SVM parameters, then runs a final training pass.
/// </summary>
/// <param name="simulatedData">The source series.</param>
/// <param name="norm">Receives the normalizer used on the data.</param>
/// <returns>The trained SVM.</returns>
public SupportVectorMachine BuildNetwork(TimeSeries simulatedData, out NormalizeArray norm)
{
    double[] samples = GenerateData(simulatedData);
    double[] scaled = NormalizeData(samples, 0.1, 0.9, out norm);

    IMLDataSet training = GenerateTraining(scaled);
    SupportVectorMachine candidate = SVMSearch(CreateNetwork(), training);
    train(candidate, training);
    return candidate;
}
/// <summary>
/// Demonstrates single-value normalization (NormalizedField) and array
/// normalization (NormalizeArray).
/// </summary>
static void Normalization()
{
    // Single value: map the actual range [40, 50] onto the normalized range
    // [-1, 1]. The original call had both pairs swapped (ahigh: 40.0,
    // alow: 50.0, nhigh: -1.0, nlow: 1.0), which inverts the mapping; the
    // positional call elsewhere in this file uses the correct order
    // (50.0, 40.0, 1.0, -1.0).
    var weightNorm = new NormalizedField(NormalizationAction.Normalize, "Weights",
                                         ahigh: 50.0, alow: 40.0, nhigh: 1.0, nlow: -1.0);
    double normalizedValue = weightNorm.Normalize(42.5);
    double denormalizedValue = weightNorm.DeNormalize(normalizedValue);

    // Array: NormalizeArray rescales using the array's own observed min/max.
    double[] weights = new double[] { 40.0, 42.5, 43.0, 49.0, 50.0 };
    var weightNorm2 = new NormalizeArray();
    weightNorm2.NormalizedHigh = 1.0;
    weightNorm2.NormalizedLow = -1.0;
    double[] normalizedWeights = weightNorm2.Process(weights);
}
/// <summary>
/// Builds one input window, ready for network.Compute, from several raw
/// (un-normalized) input series, normalizing each series into [-1, 1].
/// The original wrapped the whole body in catch (Exception ex) { throw ex; },
/// which only destroyed the stack trace; the pointless try/catch is removed.
/// </summary>
/// <param name="WindoSize"> Size of the windo. </param>
/// <param name="pparamInputs"> A variable-length parameters list containing pparam inputs. </param>
/// <returns>
/// Tuple of the interleaved normalized window (Item1) and the NormalizeArray
/// used, so results can be de-normalized (Item2).
/// </returns>
public static Tuple <List <double>, NormalizeArray> GetReadiedComputePair(int WindoSize, params double[][] pparamInputs)
{
    // One entry per input series, keyed by arrival order.
    Dictionary<int, double[]> inputsDics = new Dictionary<int, double[]>(pparamInputs.Length);
    int indexS = 0;

    // Shared normalizer: each Process() call re-derives its stats from the
    // series it is given, so the returned instance holds the stats of the
    // LAST series processed.
    NormalizeArray Normee = new NormalizeArray(-1, 1);
    foreach (double[] doubleSeries in pparamInputs)
    {
        inputsDics.Add(indexS++, Normee.Process(doubleSeries));
    }

    List<double> dtda = new List<double>();
    int listindex = 0;
    int currentindex = 0;
    // Count the fields -1, as the dictionary is zero-based.
    int dicinputsCount = inputsDics.Keys.Count - 1;

    // Walk the first series; at each position interleave that position's
    // value from every series (series 0 first, then the rest in descending
    // key order).
    foreach (double d in inputsDics[0])
    {
        if (currentindex++ < WindoSize)
        {
            dtda.Add(d);
            // Add the remaining fields for this position.
            while (dicinputsCount > 0)
            {
                dtda.Add(inputsDics[dicinputsCount--][listindex]);
            }
            // Reset the field count for the next position.
            dicinputsCount = inputsDics.Keys.Count - 1;
        }

        if (currentindex == WindoSize)
        {
            // Window complete: hand it back with the normalizer.
            return new Tuple<List<double>, NormalizeArray>(dtda, Normee);
        }

        listindex++;
    }

    // Fewer samples than one full window: return what was gathered.
    return new Tuple<List<double>, NormalizeArray>(dtda, Normee);
}
/// <summary>
/// Captures the stage-two configuration and prepares one normalizer per
/// training-data row plus a single normalizer for the output, all scaled to
/// [normalizeLo, normalizeHi].
/// </summary>
/// <param name="inputs">Number of network inputs.</param>
/// <param name="iterations">Training iteration count.</param>
/// <param name="trainingData">Matrix of training rows.</param>
/// <param name="predictData">Series used for prediction.</param>
public Stage2NeuralNetwork(int inputs, int iterations, DenseMatrix trainingData, double[] predictData)
{
    this.inputs = inputs;
    this.iterations = iterations;
    this.trainingData = trainingData;
    predictionData = predictData;

    normalizeArrayInput = new NormalizeArray[trainingData.RowCount];
    for (int row = 0; row < normalizeArrayInput.Length; row++)
    {
        normalizeArrayInput[row] = new NormalizeArray
        {
            NormalizedHigh = normalizeHi,
            NormalizedLow = normalizeLo
        };
    }

    normalizeArrayOutput = new NormalizeArray
    {
        NormalizedHigh = normalizeHi,
        NormalizedLow = normalizeLo
    };
}
/// <summary>
/// Closed-loop forecast: extends the simulated series over the given future
/// timestamps, writing each prediction back into the working buffer so later
/// windows can consume earlier forecasts.
/// </summary>
/// <param name="network">The trained SVM.</param>
/// <param name="norm">Normalizer whose stats match the training data.</param>
/// <param name="simulatedData">The known history.</param>
/// <param name="futureTimes">Timestamps to forecast, in order.</param>
/// <returns>A TimeSeries holding one prediction per future timestamp.</returns>
public TimeSeries Forecast(SupportVectorMachine network, NormalizeArray norm, TimeSeries simulatedData, List <DateTime> futureTimes)
{
    int knownCount = simulatedData.Count;
    int horizon = futureTimes.Count;

    // Working buffer: known samples first, then zeroed slots for forecasts.
    double[] buffer = new double[knownCount + horizon];
    for (int i = 0; i < knownCount; ++i)
    {
        buffer[i] = simulatedData[i];
    }
    for (int i = 0; i < horizon; ++i)
    {
        buffer[knownCount + i] = 0;
    }

    var forecast = new TimeSeries();
    for (int step = 0; step < horizon; ++step)
    {
        var window = new BasicMLData(WindowSize);
        for (int offset = 0; offset < WindowSize; offset++)
        {
            int sample = (knownCount + step - WindowSize) + offset;
            // Positions before the start of the buffer are zero-padded.
            window[offset] = sample < 0 ? 0 : norm.Stats.Normalize(buffer[sample]);
        }

        IMLData output = network.Compute(window);
        double predicted = norm.Stats.DeNormalize(output[0]);

        // Feed the prediction back so the next window can use it.
        buffer[knownCount + step] = predicted;
        forecast.Add(futureTimes[step], predicted, false);
    }

    return forecast;
}
/// <summary>
/// Normalizes an array using NormalizeArray (faster than the
/// DataNormalization pipeline). The normalized range is the standard [-1, 1].
/// </summary>
/// <param name="Arrays">The arrays.</param>
/// <returns>Tuple: normalized array (Item1) and the normalizer used (Item2).</returns>
public static Tuple <double[], NormalizeArray> NormalizeArray(double[] Arrays)
{
    var normalizer = new NormalizeArray();
    double[] processed = normalizer.Process(Arrays);
    return new Tuple<double[], NormalizeArray>(processed, normalizer);
}
/// <summary>
/// XOR training demo: trains a network with Levenberg-Marquardt until the
/// error drops below the target, prints the results, then demonstrates
/// single-value and array normalization.
/// </summary>
static void Main(string[] args)
{
    const double targetError = 0.00001;

    // XOR truth table.
    double[][] xorInput =
    {
        new[] { 0.0, 0.0 },
        new[] { 1.0, 0.0 },
        new[] { 0.0, 1.0 },
        new[] { 1.0, 1.0 }
    };
    double[][] xorIdeal =
    {
        new[] { 0.0 },
        new[] { 1.0 },
        new[] { 1.0 },
        new[] { 0.0 }
    };

    var trainingSet = new BasicMLDataSet(xorInput, xorIdeal);
    BasicNetwork network = CreateNetwork();

    // Alternative trainers kept for experimentation:
    //   new Backpropagation(network, trainingSet, 0.7, 0.2)
    //   new ManhattanPropagation(network, trainingSet, 0.001)
    //   new QuickPropagation(network, trainingSet, 2.0)
    //   new ResilientPropagation(network, trainingSet)
    //   new ScaledConjugateGradient(network, trainingSet)
    var train = new LevenbergMarquardtTraining(network, trainingSet);

    int epoch = 0;
    do
    {
        train.Iteration();
        Console.WriteLine("Iteration No: {0}, Error: {1}", ++epoch, train.Error);
    } while (train.Error > targetError);

    foreach (var item in trainingSet)
    {
        var output = network.Compute(item.Input);
        Console.WriteLine("Input: {0}, {1} \tIdeal: {2} \t Actual: {3}",
                          item.Input[0], item.Input[1], item.Ideal[0], output[0]);
    }
    Console.WriteLine("Training done.");
    Console.WriteLine("press any key to continue");
    Console.ReadLine();

    // Single-value normalization: actual [40, 50] -> normalized [-1, 1].
    var weightNorm = new NormalizedField(NormalizationAction.Normalize, "Weights", 50.0, 40.0, 1.0, -1.0);
    double normalizedValue = weightNorm.Normalize(42.5);
    double denormalizedValue = weightNorm.DeNormalize(normalizedValue);
    Console.WriteLine("Normalized value: {0}", normalizedValue.ToString());
    Console.WriteLine("press any key to continue");
    Console.ReadLine();

    // Array normalization demo.
    double[] weights = new double[] { 40.0, 42.5, 43.0, 49.0, 50.0 };
    var weightNormArray = new NormalizeArray();
    weightNormArray.NormalizedHigh = 1.0;
    weightNormArray.NormalizedLow = -1.0;
    double[] normalizedWeights = weightNormArray.Process(weights);
    foreach (var item in normalizedWeights)
    {
        Console.WriteLine("Normalized value: {0}", item.ToString());
    }
    Console.WriteLine("press any key to continue");
    Console.ReadLine();
}
/// <summary>
/// Loads variable inputs and one ideal double series into an IMLDataSet,
/// normalizing everything into [-1, 1].
/// </summary>
/// <param name="idealsinputs">The ideal (target) series.</param>
/// <param name="WindoSize">Number of positions per input window.</param>
/// <param name="pparamInputs">Variable-length list of raw input series.</param>
/// <returns>Tuple of the data set (Item1) and the normalizer used (Item2).</returns>
public static Tuple <IMLDataSet, NormalizeArray> Load(double[] idealsinputs, int WindoSize, params double[][] pparamInputs)
{
    try
    {
        var finalSet = new BasicMLDataSet();

        // We make a dic with the count of inputs being the number of double
        // series we are sending in.
        Dictionary<int, double[]> inputsDics = new Dictionary<int, double[]>(pparamInputs.Length);
        int indexS = 0;

        // We make a normalizeArray which we will return as a tuple ready for
        // denormalization. Note each Process() call re-derives the stats from
        // the series it is given.
        NormalizeArray Normer = new NormalizeArray(-1, 1);

        // Process each input series.
        foreach (double[] doubleSeries in pparamInputs)
        {
            inputsDics.Add(indexS++, Normer.Process(doubleSeries));
        }

        // Process the ideals.
        var idealNormed = Normer.Process(idealsinputs);

        // List which holds the window's inputs one after the other.
        List<double> dtda = new List<double>();
        int listindex = 0;
        int currentindex = 0;
        // Starts from zero so count - 1.
        int dicinputsCount = inputsDics.Keys.Count - 1;

        // Walk the first normalized series; interleave each position's value
        // from every series, and emit a data pair per full window.
        foreach (double d in inputsDics[0])
        {
            if (currentindex++ < WindoSize)
            {
                dtda.Add(d);
                // Add the remaining fields for this position.
                while (dicinputsCount > 0)
                {
                    dtda.Add(inputsDics[dicinputsCount--][listindex]);
                }
                // Reset the field count for the next position.
                dicinputsCount = inputsDics.Keys.Count - 1;
            }

            if (currentindex == WindoSize)
            {
                // Make an imldata pair, add it to the set, and reset the
                // temporary input list.
                var pair = new BasicMLDataPair(
                    new BasicMLData(dtda.ToArray()),
                    new BasicMLData(new double[] { idealNormed[listindex] }));
                currentindex = 0;
                dtda.Clear();
                finalSet.Add(pair);
            }

            listindex++;
        }

        // Return the dataset and the normalization array.
        return new Tuple<IMLDataSet, NormalizeArray>(finalSet, Normer);
    }
    catch (Exception ex)
    {
        // The original passed ex as a format argument to a string with no
        // placeholder (so it was silently dropped) and discarded it when
        // rethrowing. Log it properly and preserve it as the inner exception.
        Console.WriteLine("Got an error : {0}", ex);
        throw new Exception("Error parsing points....", ex);
    }
}