public List <double[]> Convert(List <StockQuote> input)
        {
            var matrixConvert   = new StockQuotesToMatrix();
            var matrix          = matrixConvert.Convert(input);
            var max             = matrix[1].Max();
            var min             = matrix[2].Min();
            var priceNormalizer = new NormalizeArray((int)Math.Floor(min), (int)Math.Ceiling(max))
            {
                NormalizedHigh = 1.0,
                NormalizedLow  = 0.0
            };
            var volMin        = matrix[4].Min();
            var volMax        = matrix[4].Max();
            var volNormalizer = new NormalizeArray((int)Math.Floor(volMin), (int)Math.Ceiling(volMax))
            {
                NormalizedHigh = 1.0,
                NormalizedLow  = 0.0
            };
            var openNormalized  = default(double[]);
            var highNormalized  = default(double[]);
            var lowNormalized   = default(double[]);
            var closeNormalized = default(double[]);
            var volNormalized   = default(double[]);

            Parallel.Invoke(
                () => openNormalized  = priceNormalizer.Process(matrix[0]),
                () => highNormalized  = priceNormalizer.Process(matrix[1]),
                () => lowNormalized   = priceNormalizer.Process(matrix[2]),
                () => closeNormalized = priceNormalizer.Process(matrix[3]),
                () => volNormalized   = volNormalizer.Process(matrix[4]));
            return(new List <double[]> {
                openNormalized, highNormalized, lowNormalized, closeNormalized, volNormalized
            });
        }
Example #2
 public double[] NormalizeData(double[] data, double lo, double hi, out NormalizeArray norm)
 {
     norm = new NormalizeArray {
         NormalizedLow = lo, NormalizedHigh = hi
     };
     return(norm.Process(data));
 }
Example #3
        /// <summary>
        /// Normalizes an array with NormalizeArray (rather than the slower DataNormalization approach).
        /// </summary>
        /// <param name="lo">The normalized low.</param>
        /// <param name="hi">The normalized high.</param>
        /// <param name="Arrays">The array to normalize.</param>
        /// <returns>The normalized array.</returns>
        public static double[] NormalizeArray(double lo, double hi, double[] Arrays)
        {
            var norm = new NormalizeArray {
                NormalizedHigh = hi, NormalizedLow = lo
            };

            return(norm.Process(Arrays));
        }
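A minimal call sketch for the helper above (illustrative values; it assumes the call is made from, or qualified with, the declaring class):

            // Sketch: scale a price series into [0, 1] using the helper above.
            double[] prices     = { 10.0, 12.5, 11.0, 14.0 };
            double[] normalized = NormalizeArray(0.0, 1.0, prices);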
Example #4
 public void NormalizeForexPair(double lo, double hi)
 {
     array = new NormalizeArray {
         NormalizedHigh = hi, NormalizedLow = lo
     };
     // create arrays to hold the normalized forex pair data
     _normalizedForexPair = array.Process(ForexPair);
     _closedLoopForexPair = EngineArray.ArrayCopy(_normalizedForexPair);
 }
        public static void NormalizeSunspots(double lo, double hi)
        {
            NormalizeArray norm = new NormalizeArray {
                NormalizedLow = lo, NormalizedHigh = hi
            };

            _normalizedSunspots = norm.Process(SUNSPOTS);
            _closedLoopSunspots = EngineArray.ArrayCopy(_normalizedSunspots);
        }
Example #6
        public void NormalizeSunspots(double lo, double hi)
        {
            var norm = new NormalizeArray {
                NormalizedHigh = hi, NormalizedLow = lo
            };

            // create arrays to hold the normalized sunspots
            _normalizedSunspots = norm.Process(Sunspots);
            _closedLoopSunspots = EngineArray.ArrayCopy(_normalizedSunspots);
        }
Example #7
            public static void normalizeSunspots(double lo, double hi)
            {
                NormalizeArray norm = new NormalizeArray();

                norm.NormalizedHigh = hi;
                norm.NormalizedLow  = lo;

                // create arrays to hold the normalized sunspots
                normalizedSunspots = norm.Process(SUNSPOTS);
                closedLoopSunspots = EngineArray.ArrayCopy(normalizedSunspots);
            }
        public void TestNormalize()
        {
            var norm = new NormalizeArray();

            double[] input  = { 1, 5, 10 };
            double[] output = norm.Process(input);
            Assert.AreEqual(3, output.Length);
            Assert.AreEqual(-1.0, output[0]);
            Assert.AreEqual(1.0, output[2]);
            Assert.AreEqual(1.0, norm.Stats.ActualLow);
            Assert.AreEqual(10.0, norm.Stats.ActualHigh);
        }
        public double[] NormalizeData(double[] data, double lo, double hi, out NormalizeArray norm)
        {
            norm = new NormalizeArray();
            norm.NormalizedHigh = hi;
            norm.NormalizedLow  = lo;

            // create an array to hold the normalized data
            double[] normalizedData = norm.Process(data);


            return(normalizedData);
        }
Example #10
File: Program.cs Project: adamkry/neural2
        static void Normalization()
        {
            //Single value
            var    weightNorm        = new NormalizedField(NormalizationAction.Normalize, "Weights", ahigh: 50.0, alow: 40.0, nhigh: 1.0, nlow: -1.0);
            double normalizedValue   = weightNorm.Normalize(42.5);
            double denormalizedValue = weightNorm.DeNormalize(normalizedValue);

            //Array
            double[] weights     = new double[] { 40.0, 42.5, 43.0, 49.0, 50.0 };
            var      weightNorm2 = new NormalizeArray();

            weightNorm2.NormalizedHigh = 1.0;
            weightNorm2.NormalizedLow  = -1.0;
            double[] normalizedWeights = weightNorm2.Process(weights);
        }
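Process() records the actual range it observed on the Stats property, so the same NormalizeArray can map values back later. A minimal sketch, assuming Stats exposes DeNormalize in the same way NormalizedField does above:

        // Sketch: map a normalized weight back to the original 40-50 range.
        // (Stats.DeNormalize is an assumption here, mirroring NormalizedField.DeNormalize used above.)
        static double RestoreWeight(NormalizeArray norm, double normalizedWeight)
        {
            return norm.Stats.DeNormalize(normalizedWeight);
        }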
Example #11
        /// <summary>
        /// Sends in all the (unnormalized) inputs, in the same order the network was trained on, and returns a list of doubles ready for network.Compute, together with the NormalizeArray used.
        /// </summary>
        /// <param name="WindoSize"> Size of the window. </param>
        /// <param name="pparamInputs"> A variable-length parameter list containing the input series. </param>
        /// <returns>
        ///  The readied compute pair (inputs plus the NormalizeArray used).
        /// </returns>
        public static Tuple <List <double>, NormalizeArray> GetReadiedComputePair(int WindoSize, params double[][] pparamInputs)
        {
            try
            {
                //We make a dic with the count of inputs being the number of double series we are sending in.
                Dictionary <int, double[]> inputsDics = new Dictionary <int, double[]>(pparamInputs.Length);

                int            indexS = 0;
                NormalizeArray Normee = new NormalizeArray(-1, 1);
                // PredictionStats.NormalizationClass NormingClass = new PredictionStats.NormalizationClass();
                foreach (double[] doubleSeries in pparamInputs)
                {
                    inputsDics.Add(indexS++, Normee.Process(doubleSeries));
                }
                List <double> dtda         = new List <double>();
                int           listindex    = 0;
                int           currentindex = 0;
                //count the fields -1 ,as it starts from zero.
                int dicinputsCount = inputsDics.Keys.Count - 1;
                foreach (double d in inputsDics[0])
                {
                    if (currentindex++ < WindoSize)
                    {
                        dtda.Add(d);
                        //we put all the fields which are in the dic.
                        while (dicinputsCount > 0)
                        {
                            dtda.Add(inputsDics[dicinputsCount--][listindex]);
                        }
                        //We reset the field count for a later pass.
                        dicinputsCount = inputsDics.Keys.Count - 1;
                    }
                    if (currentindex == WindoSize)
                    {
                        return(new Tuple <List <double>, NormalizeArray>(dtda, Normee));
                    }
                    //Lets increment the indexes..
                    listindex++;
                }
                return(new Tuple <List <double>, NormalizeArray>(dtda, Normee));
            }
            catch (Exception)
            {
                // Rethrow, preserving the original stack trace (throw ex would reset it).
                throw;
            }
        }
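A hypothetical call site for GetReadiedComputePair; the input series and the trained network are placeholders, not part of the original snippet:

            // Sketch: build one readied window from two aligned series and feed a trained network.
            // double[] closes  = LoadCloses();                            // placeholder series
            // double[] volumes = LoadVolumes();                           // placeholder series
            // var readied = GetReadiedComputePair(5, closes, volumes);
            // var output  = network.Compute(new BasicMLData(readied.Item1.ToArray()));
            // double real = readied.Item2.Stats.DeNormalize(output[0]);   // denormalization via Stats is assumed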
Example #12
        /// <summary>
        /// Normalizes an array with NormalizeArray (rather than the slower DataNormalization approach).
        /// The normalized high and low are the standard 1 and -1.
        /// </summary>
        /// <param name="Arrays">The array to normalize.</param>
        /// <returns>A tuple with the normalized array in Item1 and the NormalizeArray used in Item2.</returns>
        public static Tuple <double[], NormalizeArray> NormalizeArray(double[] Arrays)
        {
            var norm = new NormalizeArray();

            return(new Tuple <double[], NormalizeArray>(norm.Process(Arrays), norm));
        }
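A short call sketch for the tuple variant (illustrative values; it assumes the call is made from the declaring class):

            // Sketch: normalize with the default -1..1 range and keep the normalizer for later use.
            double[] series = { 5.0, 11.0, 16.0, 23.0, 36.0 };
            Tuple <double[], NormalizeArray> result = NormalizeArray(series);
            double[]       normalized = result.Item1;
            NormalizeArray normalizer = result.Item2;   // normalizer.Stats holds the actual high/low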
Example #13
 /// <summary>
 /// Normalizes an array; the ArrayNormalizer object keeps the normalization used, so you can later call ArrayNormalizer.Stats.Denormalize(2).
 /// </summary>
 /// <param name="inputArray">The input array.</param>
 /// <returns>A normalized array of doubles.</returns>
 public static double[] NormalizeThisArray(double[] inputArray)
 {
     return(ArrayNormalizer.Process(inputArray));
 }
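This helper relies on a class-level ArrayNormalizer field that the snippet does not show; the declaration below is an assumption, as is the use of Stats.DeNormalize:

 // Assumed backing field (not shown in the original snippet):
 // static readonly NormalizeArray ArrayNormalizer = new NormalizeArray();
 //
 // double[] scaled   = NormalizeThisArray(new[] { 3.0, 9.0, 27.0 });
 // double   restored = ArrayNormalizer.Stats.DeNormalize(scaled[1]);   // maps back to 9.0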
Example #14
        static void Main(string[] args)
        {
            double error = 0.00001;

            double[][] XOR_Input =
            {
                new[] { 0.0, 0.0 },
                new[] { 1.0, 0.0 },
                new[] { 0.0, 1.0 },
                new[] { 1.0, 1.0 }
            };

            double[][] XOR_Ideal =
            {
                new[] { 0.0 },
                new[] { 1.0 },
                new[] { 1.0 },
                new[] { 0.0 }
            };

            var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);

            BasicNetwork network = CreateNetwork();

            //var train = new Backpropagation(network, trainingSet, 0.7, 0.2);
            //var train = new ManhattanPropagation(network, trainingSet, 0.001);
            // var train = new QuickPropagation(network, trainingSet, 2.0);
            //var train = new ResilientPropagation(network, trainingSet);
            //var train = new ScaledConjugateGradient(network, trainingSet);
            var train = new LevenbergMarquardtTraining(network, trainingSet);

            int epoch = 0;

            do
            {
                train.Iteration();
                Console.WriteLine("Iteration No: {0}, Error: {1}", ++epoch, train.Error);
            }while (train.Error > error);

            foreach (var item in trainingSet)
            {
                var output = network.Compute(item.Input);
                Console.WriteLine("Input: {0}, {1} \tIdeal: {2} \t Actual: {3}", item.Input[0], item.Input[1], item.Ideal[0], output[0]);
            }

            Console.WriteLine("Training done.");
            Console.WriteLine("press any key to continue");
            Console.ReadLine();

            // normalized value
            var weightNorm = new NormalizedField(NormalizationAction.Normalize, "Weights", 50.0, 40.0, 1.0, -1.0);

            double normalizedValue   = weightNorm.Normalize(42.5);
            double denormalizedValue = weightNorm.DeNormalize(normalizedValue);

            Console.WriteLine("Normalized value: {0}", normalizedValue.ToString());
            Console.WriteLine("press any key to continue");
            Console.ReadLine();

            // normalized array
            double[] weights         = new double[] { 40.0, 42.5, 43.0, 49.0, 50.0 };
            var      weightNormArray = new NormalizeArray();

            weightNormArray.NormalizedHigh = 1.0;
            weightNormArray.NormalizedLow  = -1.0;
            double[] normalizedWeights = weightNormArray.Process(weights);

            foreach (var item in normalizedWeights)
            {
                Console.WriteLine("Normalized value: {0}", item.ToString());
            }
            Console.WriteLine("press any key to continue");
            Console.ReadLine();
        }
Example #15
        /// <summary>
        /// Loads a variable number of input series and one ideal double series into an IMLDataSet.
        /// </summary>
        /// <param name="idealsinputs">The ideal (target) series.</param>
        /// <param name="WindoSize">Size of the window.</param>
        /// <param name="pparamInputs">A variable-length parameter list containing the input series.</param>
        /// <returns>A tuple with the dataset in Item1 and the NormalizeArray used in Item2.</returns>
        public static Tuple <IMLDataSet, NormalizeArray> Load(double[] idealsinputs, int WindoSize, params double[][] pparamInputs)
        {
            try
            {
                var finalSet = new BasicMLDataSet();
                //We make a dic with the count of inputs being the number of double series we are sending in.
                Dictionary <int, double[]> inputsDics = new Dictionary <int, double[]>(pparamInputs.Length);
                int indexS = 0;
                //We make a normalizeArray which we will return as a tuple ready for denormalization.
                NormalizeArray Normer = new NormalizeArray(-1, 1);
                //Process each inputs.
                foreach (double[] doubleSeries in pparamInputs)
                {
                    inputsDics.Add(indexS++, Normer.Process(doubleSeries));
                }
                //Process the ideals.
                var idealNormed = Normer.Process(idealsinputs);

                //Make a list which will hold the inputs one after the others
                List <double> dtda         = new List <double>();
                int           listindex    = 0;
                int           currentindex = 0;
                //starts from zero so count -1..
                int dicinputsCount = inputsDics.Keys.Count - 1;
                //Process the input normed.
                foreach (double d in inputsDics[0])
                {
                    if (currentindex++ < WindoSize)
                    {
                        dtda.Add(d);
                        //we put all the fields which are in the dic.
                        while (dicinputsCount > 0)
                        {
                            dtda.Add(inputsDics[dicinputsCount--][listindex]);
                        }
                        //We reset the field count for a later pass.
                        dicinputsCount = inputsDics.Keys.Count - 1;
                    }

                    if (currentindex == WindoSize)
                    {
                        //Make an imldata pair, and add it to the imldataset...reset the temp list of inputs...
                        var pair = new BasicMLDataPair(
                            new BasicMLData(dtda.ToArray()),
                            new BasicMLData(new double[] { idealNormed[listindex] }));
                        currentindex = 0;
                        dtda.Clear();
                        finalSet.Add(pair);
                    }
                    //Lets increment the indexes..
                    listindex++;
                }
                //Return the dataset and the normalization array..
                return(new Tuple <IMLDataSet, NormalizeArray>(finalSet, Normer));
            }
            catch (Exception ex)
            {
                Console.WriteLine("Got an error : {0}", ex);
                throw new Exception("Error parsing points.", ex);
            }
        }
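A hypothetical caller for Load; the input series, network, and training loop are placeholders, not part of the original snippet:

            // Sketch: build a windowed dataset from an ideal series plus two input series,
            // train on it, then denormalize network outputs with the returned NormalizeArray.
            // var loaded = Load(closes, 5, closes, volumes);              // placeholder series
            // IMLDataSet trainingSet = loaded.Item1;
            // /* ... train a network on trainingSet ... */
            // foreach (var pair in trainingSet)
            // {
            //     double predicted = loaded.Item2.Stats.DeNormalize(network.Compute(pair.Input)[0]);
            // }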
Example #16
 public double[] NormalizeData(double[] data)
 {
     return(normalizeArrayOutput.Process(data));
 }