Example #1
        public DataSerie2D GetClassifiedSerie(DataSerie1D dsIn)
        {
            if (Equals(dsIn, null))
            {
                return(null);
            }
            if (Equals(dsIn.Data, null))
            {
                return(null);
            }

            DataSerie2D dsOut = new DataSerie2D();

            try
            {
                int    classe   = 0;
                double division = 0;
                foreach (DataItem1D itm in dsIn.Data)
                {
                    division = Math.Floor(((itm.X_Value * MaxValue) / Step));
                    classe   = (int)division;
                    dsOut.Add(itm.Title, (itm.X_Value * MaxValue), (classe + 1));
                }
            }
            catch (Exception) { throw; }
            return(dsOut);
        }
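
For reference, the class index assigned to each item above (MaxValue and Step are properties of the enclosing class, not shown in this excerpt) is:

    class = \left\lfloor \frac{x \cdot MaxValue}{Step} \right\rfloor + 1

where x is the item's X_Value, so the stored value is x \cdot MaxValue and the stored class is the 1-based bin it falls into.
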
            public int[] GetLayersStruct(DataSerie1D hidenLayersStructure, int inputsCount, int ouputsCount)
            {
                int iCount = 2;

                int[] result = null;

                if ((object.Equals(hidenLayersStructure, null)) || (object.Equals(hidenLayersStructure.Data, null)))
                {
                    result    = new int[iCount];
                    result[0] = inputsCount;
                    result[1] = ouputsCount;
                }
                else
                {
                    iCount = hidenLayersStructure.Data.Count + 1;

                    result = new int[iCount];

                    for (int i = 0; i < (iCount - 1); i++)
                    {
                        result[i] = (Int32)Math.Round(hidenLayersStructure.Data[i].X_Value, 0);
                    }

                    result[(iCount - 1)] = ouputsCount;
                }
                return(result);
            }
Example #3
        /// <summary>
        /// Nash-Sutcliffe Efficiency (Nash criterion, Nash and Sutcliffe (1970)) computation of two series.
        /// </summary>
        /// <param name="O_Serie">Observed data series</param>
        /// <param name="P_Serie">Predicted data series</param>
        /// <returns>Nash criterion Value</returns>
        public static double Compute_Nash_Sutcliffe_Efficiency(DataSerie1D O_Serie, DataSerie1D P_Serie)
        {
            double nash = double.NaN;

            if (CheckDataSeries(O_Serie, P_Serie) == false)
            {
                return(double.NaN);
            }
            try
            {
                double sum1  = 0;
                double sum2  = 0;
                int    N     = O_Serie.Count;
                double oMean = O_Serie.Mean;
                for (int i = 0; i < N; i++)
                {
                    sum1 += Math.Pow((O_Serie.Data[i].X_Value - P_Serie.Data[i].X_Value), 2);
                    sum2 += Math.Pow((O_Serie.Data[i].X_Value - oMean), 2);
                }
                if (sum2 == 0)
                {
                    return(double.NegativeInfinity);
                }
                else
                {
                    nash = 1 - (sum1 / sum2);
                }
            }
            catch (Exception)
            {
                throw;
            }
            return(nash);
        }
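
Written out, the loop above computes the standard Nash-Sutcliffe efficiency, with O the observed and P the predicted series:

    NSE = 1 - \frac{\sum_{i=1}^{N} (O_i - P_i)^2}{\sum_{i=1}^{N} (O_i - \bar{O})^2}
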
Example #4
            private int[] GetLayersStruct(DataSerie1D ds1)
            {
                int iCount = 2;

                int[] result = null;

                if ((object.Equals(ds1, null)) || (object.Equals(ds1.Data, null)))
                {
                    result    = new int[iCount];
                    result[0] = this.ANN_Inputs_Count;
                    result[1] = this.ANN_Outputs_Count;
                }
                else
                {
                    iCount = ds1.Data.Count + 1;

                    result = new int[iCount];

                    for (int i = 0; i < (iCount - 1); i++)
                    {
                        result[i] = (Int32)Math.Round(ds1.Data[i].X_Value, 0);
                    }

                    result[(iCount - 1)] = this.ANN_Outputs_Count;
                }
                return(result);
            }
        public DataSerie1D Forecast(DataSerie1D initialSerie, bool addMonths, int startingMonth)
        {
            int         futureCount = FutureHorizon;
            DataSerie1D result      = new DataSerie1D();

            if (Equals(NeuralNetEngine, null))
            {
                return(null);
            }
            int neuralDimension = NeuralNetEngine.InputsCount - 1;
            int dataDimension   = initialSerie.Count;

            if (addMonths)
            {
                try
                {
                    if (neuralDimension > dataDimension)
                    {
                        throw new Exception("Input series is shorter than the network's input dimension.");
                    }

                    for (int k = (dataDimension - neuralDimension); k < dataDimension; k++)
                    {
                        result.Add(initialSerie.Data[k].Title, initialSerie.Data[k].X_Value);
                    }

                    //------------------------------------
                    int    monthIndex = startingMonth + neuralDimension;
                    double monthStand;
                    //------------------------------------

                    double[] inputs      = new double[neuralDimension + 1];
                    double   resultvalue = 0;

                    for (int i = 0; i < futureCount; i++)
                    {
                        for (int j = 0; j < neuralDimension; j++)
                        {
                            inputs[j] = result.Data[(j + i)].X_Value;
                        }
                        //------------------------------------------------
                        monthStand = (GetMonthIndex(monthIndex) * 0.9) / 12;
                        inputs[neuralDimension] = monthStand;
                        monthIndex += 1;
                        //------------------------------------------------
                        resultvalue = NeuralNetEngine.Compute(inputs)[0];
                        result.Add(i.ToString(), resultvalue);
                    }
                }
                catch (Exception)
                { throw; }
            }
            else
            {
                result = Forecast(initialSerie);
            }
            return(result);
        }
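
The extra input appended in the loop above is the running month index rescaled to at most 0.9 before it is fed to the network (GetMonthIndex is assumed to wrap the counter back into the 1..12 range; its implementation is not shown in these excerpts):

    m' = \frac{0.9 \cdot month}{12}, \qquad month \in \{1, \dots, 12\}
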
Example #6
        public void GetLayersStructTest1()
        {
            DataSerie1D hidenLayersStruct = new DataSerie1D();

            hidenLayersStruct.Add("HL1", 4);
            hidenLayersStruct.Add("HL2", 5);
            hidenLayersStruct.Add("HL3", 2);
            int inp = 2;

            int[] AnnStruct = new int[] { 4, 5, 2, 1 };
            Assert.Equal(AnnStruct, _tst.GetLayersStruct(hidenLayersStruct, inp, 1));
        }
Example #7
            private double[][] Convert(DataSerie1D ds1)
            {
                int iCount = ds1.Data.Count;

                double[][] result = new double[iCount][];

                for (int i = 0; i < iCount; i++)
                {
                    result[i] = new double[] { ds1.Data[i].X_Value };
                }

                return(result);
            }
Example #8
            public void StandardizeData(DataSerie1D dseries, ActivationFunctionEnum activeFunction)
            {
                if (Equals(dseries, null))
                {
                    return;
                }
                if (Equals(dseries.Data, null))
                {
                    return;
                }

                bool doStand = false;

                switch (activeFunction)
                {
                case ActivationFunctionEnum.SigmoidFunction:

                    foreach (DataItem1D ditem in dseries.Data)
                    {
                        if ((ditem.X_Value < 0) || (ditem.X_Value > 1))
                        {
                            doStand = true;
                            break;
                        }
                    }

                    if (doStand)
                    {
                        double mins = dseries.Min;
                        double maxs = dseries.Max - mins;

                        foreach (DataItem1D ditem in dseries.Data)
                        {
                            ditem.X_Value = (ditem.X_Value - mins) / maxs;
                        }
                    }

                    break;

                case ActivationFunctionEnum.BipolarSigmoidFunction:
                    break;

                case ActivationFunctionEnum.LinearFunction:
                    break;
                }
            }
Example #9
            public DataSerie1D Compute(DataSerieTD inputsDs)
            {
                if (Equals(inputsDs, null))
                {
                    return(null);
                }
                if (Equals(inputsDs.Data, null))
                {
                    return(null);
                }
                if (inputsDs.Data.Count < 1)
                {
                    return(null);
                }

                if (Equals(Network, null))
                {
                    return(null);
                }

                StandardizeData(inputsDs, this.mActivationFunction);

                DataSerie1D resultDs = new DataSerie1D();

                double[] result = null;

                int jCount = inputsDs.Data[0].List.Count();

                if (jCount < 1)
                {
                    return(null);
                }

                double[] input = new double[jCount];

                foreach (DataItemTD dItem in inputsDs.Data)
                {
                    for (int j = 0; j < jCount; j++)
                    {
                        input[j] = dItem.List[j];
                    }

                    result = Network.Compute(input);

                    resultDs.Add(dItem.Title, result[0]);
                }

                return(resultDs);
            }
Example #10
        /// <summary>
        /// Computation of Determination Coefficient (R2=R^2) of Two Series.
        /// </summary>
        /// <param name="O_Serie">Observed data series</param>
        /// <param name="P_Serie">Predicted data series</param>
        /// <returns>R2 Value</returns>
        public static double Compute_DeterminationCoeff_R2(DataSerie1D O_Serie, DataSerie1D P_Serie)
        {
            double rValue = Compute_CorrelationCoeff_R(O_Serie, P_Serie);

            if (double.IsNaN(rValue))
            {
                return(double.NaN);
            }
            else if (double.IsInfinity(rValue))
            {
                return(double.PositiveInfinity);
            }
            else
            {
                return(Math.Pow(rValue, 2));
            }
        }
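
In formula form, the value returned above is simply the square of the Pearson correlation coefficient r computed by Compute_CorrelationCoeff_R (Example #12):

    R^2 = r^2
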
Example #11
        public DataSerie1D Forecast(DataSerie1D initialSerie)
        {
            int         futureCount = FutureHorizon;
            DataSerie1D result      = new DataSerie1D();

            if (Equals(NeuralNetEngine, null))
            {
                return(null);
            }
            int neuralDimension = NeuralNetEngine.InputsCount;
            int dataDimension   = initialSerie.Count;

            try
            {
                if (neuralDimension > dataDimension)
                {
                    throw new Exception("Input series is shorter than the network's input dimension.");
                }

                for (int k = (dataDimension - neuralDimension); k < dataDimension; k++)
                {
                    result.Add(initialSerie.Data[k].Title, initialSerie.Data[k].X_Value);
                }

                double[] inputs      = new double[neuralDimension];
                double   resultvalue = 0;


                for (int i = 0; i < futureCount; i++)
                {
                    for (int j = 0; j < neuralDimension; j++)
                    {
                        inputs[j] = result.Data[(j + i)].X_Value;
                    }

                    resultvalue = NeuralNetEngine.Compute(inputs)[0];
                    result.Add(i.ToString(), resultvalue);
                }
            }
            catch (Exception)
            { throw; }

            return(result);
        }
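
The loop above performs recursive multi-step forecasting: each new value is predicted from the last neuralDimension values and then appended to the series so it can feed the next step. Schematically, with f the trained network and m = neuralDimension:

    \hat{x}_{t+1} = f(x_{t-m+1}, \dots, x_t)
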
Example #12
        /// <summary>
        /// Computation of Correlation Coefficient (R) of Two Series.
        /// </summary>
        /// <param name="O_Serie">Observed data series</param>
        /// <param name="P_Serie">Predicted data series</param>
        /// <returns>R Value</returns>
        public static double Compute_CorrelationCoeff_R(DataSerie1D O_Serie, DataSerie1D P_Serie)
        {
            double rValue = double.NaN;

            if (CheckDataSeries(O_Serie, P_Serie) == false)
            {
                return(rValue);
            }
            try
            {
                double sum1 = 0;
                double sum2 = 0;
                double sum3 = 0;

                int    N     = O_Serie.Count;
                double oMean = O_Serie.Mean;
                double pMean = P_Serie.Mean;

                for (int i = 0; i < N; i++)
                {
                    sum1 += ((O_Serie.Data[i].X_Value - oMean) * (P_Serie.Data[i].X_Value - pMean));
                    sum2 += Math.Pow((O_Serie.Data[i].X_Value - oMean), 2);
                    sum3 += Math.Pow((P_Serie.Data[i].X_Value - pMean), 2);
                }
                if (sum2 == 0 || sum3 == 0)
                {
                    if (sum1 >= 0)
                    {
                        return(double.PositiveInfinity);
                    }
                    else
                    {
                        return(double.NegativeInfinity);
                    }
                }
                else
                {
                    rValue = sum1 / (Math.Sqrt((sum2 * sum3)));
                }
            }

            catch (Exception) { throw; }
            return(rValue);
        }
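
For reference, the loop above computes Pearson's correlation coefficient:

    r = \frac{\sum_{i=1}^{N} (O_i - \bar{O})(P_i - \bar{P})}{\sqrt{\sum_{i=1}^{N} (O_i - \bar{O})^2 \, \sum_{i=1}^{N} (P_i - \bar{P})^2}}
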
Example #13
        /// <summary>
        /// Computation of Mean Absolute Error (MAE) of Two data series.
        /// </summary>
        /// <param name="O_Serie">Observed data series</param>
        /// <param name="P_Serie">Predicted data series</param>
        /// <returns>MAE Value</returns>
        public static double Compute_MAE(DataSerie1D O_Serie, DataSerie1D P_Serie)
        {
            double mae = double.NaN;

            if (CheckDataSeries(O_Serie, P_Serie) == false)
            {
                return(mae);
            }
            try
            {
                double sum = 0;
                int    N   = O_Serie.Count;
                for (int i = 0; i < N; i++)
                {
                    sum += Math.Abs((O_Serie.Data[i].X_Value - P_Serie.Data[i].X_Value));
                }
                mae = sum / N;
            }

            catch (Exception) { throw; }
            return(mae);
        }
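
The quantity computed above is:

    MAE = \frac{1}{N} \sum_{i=1}^{N} \left| O_i - P_i \right|
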
Example #14
        /// <summary>
        /// Computation of Root Mean Square Error (RMSE) of Two data series.
        /// </summary>
        /// <param name="O_Serie">Observed data series</param>
        /// <param name="P_Serie">Predicted data series</param>
        /// <returns>RMSE Value</returns>
        public static double  Compute_RMSE(DataSerie1D O_Serie, DataSerie1D P_Serie)
        {
            double rmse = double.NaN;

            if (CheckDataSeries(O_Serie, P_Serie) == false)
            {
                return(rmse);
            }
            try
            {
                double sum = 0;
                int    N   = O_Serie.Count;
                for (int i = 0; i < N; i++)
                {
                    sum += Math.Pow((O_Serie.Data[i].X_Value - P_Serie.Data[i].X_Value), 2);
                }
                rmse = sum / N;
                rmse = Math.Sqrt(rmse);
            }
            catch (Exception) { throw; }
            return(rmse);
        }
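
The quantity computed above is:

    RMSE = \sqrt{\frac{1}{N} \sum_{i=1}^{N} (O_i - P_i)^2}
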
Example #15
        private static bool CheckDataSeries(DataSerie1D O_Serie, DataSerie1D P_Serie)
        {
            bool result = true;

            if (object.Equals(O_Serie, null) || object.Equals(O_Serie.Data, null))
            {
                return(false);
            }
            if (object.Equals(P_Serie, null) || object.Equals(P_Serie.Data, null))
            {
                return(false);
            }
            if (O_Serie.Count != P_Serie.Count)
            {
                return(false);
            }
            if (O_Serie.Count < 1)
            {
                return(false);
            }
            return(result);
        }
Example #16
        private double[][] Convert(DataSerie1D ds1)
        {
            if (object.Equals(ds1, null))
            {
                return(null);
            }
            if (object.Equals(ds1.Data, null))
            {
                return(null);
            }

            int iCount = ds1.Data.Count;

            double[][] result = new double[iCount][];

            for (int i = 0; i < iCount; i++)
            {
                result[i] = new double[] { ds1.Data[i].X_Value };
            }

            return(result);
        }
Example #17
        static void LaunchANN(double[][] matrix, double[] vector)
        {
            //double[] vector = new double[] { 0.2, 0.2, 0.6, 0.9, 0.5};
            //double[][] matrix = new double[][]
            //{
            //    new double[]{0.1, 0.1},
            //    new double[]{0.2, 0.0},
            //    new double[]{0.3, 0.3},
            //    new double[]{0.4, 0.5},
            //    new double[]{0.25, 0.25}
            //};

            DataSerie1D annStrct = new DataSerie1D();

            annStrct.Add("HL1", 4);
            annStrct.Add("HL2", 2);

            NeuralNetworkEngineEO ann = new NeuralNetworkEngineEO(matrix, vector, annStrct);

            ann.LearningMaxIterations    = 100;
            ann.LearningError            = 0.0001;
            ann.LearningAlgorithm        = LearningAlgorithmEnum.LevenbergMarquardtLearning;
            ann.LearningAlgorithm_Params = new double[] { 0.1, 10 };
            ann.ActivationFunction       = ActivationFunctionEnum.BipolarSigmoidFunction;
            ann.ActiveFunction_Params    = new double[] { 2.00 };


            ann.Learn();
            //-0.5440211109;-0.2720105555
            var ans = ann.Compute(new double[] { 0.28, 0.29 });

            foreach (var valu in ans)
            {
                Console.WriteLine("ans = {0}", Math.Round(valu, 3));
            }

            Console.WriteLine("Final teaching err = {0}; MaxIter ={1}.", ann.FinalTeachingError, ann.FinalIterationsCount);
        }
Example #18
            /// <summary>
            /// The data must be standardized before learning.
            /// </summary>
            /// <param name="hidenLayerStructure">Hidden layers structure: one item per hidden layer, with X_Value holding the neurons count.</param>
            public void Learn(DataSerie1D hidenLayerStructure)
            {
                // Step 0 : Check data
                if (CheckData() == false)
                {
                    return;
                }
                _BestNeuralNetwork = new NeuralNetworkEngineEO();

                // Step 1 : Standardize data and get input data;
                //------------------------------------------------

                _BestNeuralNetwork.Learning_Inputs  = this.LearningInputs;
                _BestNeuralNetwork.Learning_Outputs = ConvertToJagged(this.LearningOutputs);

                // Step 2 : Set the ANN's structure, activation function and params.
                _BestNeuralNetwork.LayersStruct = GetLayersStruct(hidenLayerStructure, this.LearningInputs[0].Length, 1);

                _BestNeuralNetwork.ActivationFunction       = DefaultActivationFunction;
                _BestNeuralNetwork.LearningAlgorithm_Params = DefaultActiveFunction_Params;

                _BestNeuralNetwork.Learn();
            }
Example #19
        public DataSerie1D Forecast2(DataSerie1D initialSerie)
        {
            int         futureCount     = FutureHorizon;
            DataSerie1D result          = new DataSerie1D();
            int         neuralDimension = NeuralNetEngine.InputsCount;
            int         dataDimension   = 1;
            int         dataCount       = initialSerie.Count;

            try
            {
                if (neuralDimension != dataDimension)
                {
                    throw new Exception("Network input dimension does not match the expected data dimension.");
                }

                double[] inputs      = new double[neuralDimension];
                double   resultvalue = 0;

                //for (int k = 0; k < neuralDimension; k++)
                //{ result.Add(initialSerie.Data[k].Title, initialSerie.Data[k].X_Value, -100); }

                for (int i = 0; i < futureCount; i++)
                {
                    for (int j = 0; j < neuralDimension; j++)
                    {
                        inputs[j] = initialSerie.Data[i].X_Value;
                    }

                    resultvalue = NeuralNetEngine.Compute(inputs)[0];
                    result.Add(initialSerie.Data[i].Title, resultvalue);
                }
            }
            catch (Exception)
            { throw; }

            return(result);
        }
Example #20
            /// <summary>
            /// Standardizes the data to the range [0, 1].
            /// </summary>
            /// <param name="dseries"></param>
            public void StandardizeData(DataSerie1D dseries)
            {
                if (Equals(dseries, null))
                {
                    return;
                }
                if (Equals(dseries.Data, null))
                {
                    return;
                }

                bool doStand = false;

                foreach (DataItem1D ditem in dseries.Data)
                {
                    if ((ditem.X_Value < 0) || (ditem.X_Value > 1))
                    {
                        doStand = true;
                        break;
                    }
                }

                if (doStand)
                {
                    double mins = dseries.Min;
                    double maxs = dseries.Max - mins;

                    foreach (DataItem1D ditem in dseries.Data)
                    {
                        ditem.X_Value = (ditem.X_Value - mins) / maxs;
                    }
                }
            }
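
The rescaling applied when any value falls outside [0, 1] is a standard min-max normalization:

    x' = \frac{x - \min(x)}{\max(x) - \min(x)}
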
Example #21
        /// <summary>
        /// Agreement Index (Willmott index, Willmott (1981)) computation of two data series.
        /// </summary>
        /// <param name="O_Serie">Observed data series</param>
        /// <param name="P_Serie">Predicted data series</param>
        /// <returns>d Value</returns>
        public static double Compute_Agreement_Index(DataSerie1D O_Serie, DataSerie1D P_Serie)
        {
            double dValue = double.NaN;

            if (CheckDataSeries(O_Serie, P_Serie) == false)
            {
                return(double.NaN);
            }

            try
            {
                double sum1  = 0;
                double sum2  = 0;
                int    N     = O_Serie.Count;
                double oMean = O_Serie.Mean;
                for (int i = 0; i < N; i++)
                {
                    sum1 += Math.Pow((O_Serie.Data[i].X_Value - P_Serie.Data[i].X_Value), 2);
                    sum2 += Math.Pow((Math.Abs((O_Serie.Data[i].X_Value - oMean)) + Math.Abs((P_Serie.Data[i].X_Value - oMean))), 2);
                }
                if (sum2 == 0)
                {
                    return(double.NegativeInfinity);
                }
                else
                {
                    dValue = 1 - (sum1 / sum2);
                }
            }
            catch (Exception)
            {
                throw;
            }

            return(dValue);
        }
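
The quantity computed above is Willmott's index of agreement:

    d = 1 - \frac{\sum_{i=1}^{N} (O_i - P_i)^2}{\sum_{i=1}^{N} \left( |O_i - \bar{O}| + |P_i - \bar{O}| \right)^2}
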
 public NeuralNetworkEngineEO(double[][] trainingIn, double[] trainingOut, DataSerie1D annHidenLayers)
 {
     this.LearningAlgorithm  = LearningAlgorithmEnum.LevenbergMarquardtLearning;
     this.ActivationFunction = ActivationFunctionEnum.SigmoidFunction;
     this.Learning_Inputs    = trainingIn;
     this.Learning_Outputs   = ConvertToJagged(trainingOut);
     this.networkStruct      = GetLayersStruct(annHidenLayers, trainingIn[0].Length, 1);
 }
Example #23
            //[Category("GWO Parameters")] public GWOVersionEnum GWOVersion { get; set; } = GWOVersionEnum.StandardGWO;
            //[Category("GWO Parameters")] public double IGWO_uParameter { get; set; } = 1.1;


            private void Initialize()
            {
                List <MonoObjectiveEOALib.Range> intervales;

                switch (this.Learning_Algorithm)
                {
                case LearningAlgorithmEnum.BackPropagationLearning:
                    intervales = new List <MonoObjectiveEOALib.Range>
                    {
                        new MonoObjectiveEOALib.Range("Activation Function", 0.8, 2.4),
                        new MonoObjectiveEOALib.Range("Alpha of Activation Function", 0.2, 5),
                        new MonoObjectiveEOALib.Range("Learning rate", 0.1, 1),
                        new MonoObjectiveEOALib.Range("Learning Err", 0.01, 1),
                        new MonoObjectiveEOALib.Range("Max Iteration (Kmax)", MinLearningIterations, MaxLearningIterations),
                        new MonoObjectiveEOALib.Range("Hiden Layer Number", 1, 5),
                        new MonoObjectiveEOALib.Range("Layer 1 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 2 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 3 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 4 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 5 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                        //new MonoObjectiveEOALib.Range("Layer 6 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        //new MonoObjectiveEOALib.Range("Layer 7 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                    };
                    break;

                case LearningAlgorithmEnum.LevenbergMarquardtLearning:
                    intervales = new List <MonoObjectiveEOALib.Range>
                    {
                        new MonoObjectiveEOALib.Range("Activation Function", 0.8, 2.4),
                        new MonoObjectiveEOALib.Range("Alpha of Activation Function", 0.2, 5),
                        new MonoObjectiveEOALib.Range("Learning rate", 0.1, 1),
                        new MonoObjectiveEOALib.Range("Learning Err", 0.01, 1),
                        new MonoObjectiveEOALib.Range("Max Iteration (Kmax)", MinLearningIterations, MaxLearningIterations),
                        new MonoObjectiveEOALib.Range("Hiden Layer Number", 1, 5),
                        new MonoObjectiveEOALib.Range("Layer 1 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 2 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 3 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 4 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 5 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                        //new MonoObjectiveEOALib.Range("Layer 6 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        //new MonoObjectiveEOALib.Range("Layer 7 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                    };
                    break;

                case LearningAlgorithmEnum.EvolutionaryLearningGA:
                    intervales = new List <MonoObjectiveEOALib.Range>
                    {
                        new MonoObjectiveEOALib.Range("Activation Function", 0.8, 2.4),
                        new MonoObjectiveEOALib.Range("Alpha of Activation Function", 0.2, 5),
                        new MonoObjectiveEOALib.Range("Population size", 30, 120),
                        new MonoObjectiveEOALib.Range("Learning Err", 0.01, 1),
                        new MonoObjectiveEOALib.Range("Max Iteration (Kmax)", MinLearningIterations, MaxLearningIterations),
                        new MonoObjectiveEOALib.Range("Hiden Layer Number", 1, 5),
                        new MonoObjectiveEOALib.Range("Layer 1 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 2 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 3 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 4 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 5 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                        //new MonoObjectiveEOALib.Range("Layer 6 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        //new MonoObjectiveEOALib.Range("Layer 7 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                    };
                    break;

                case LearningAlgorithmEnum.HPSOGWO_Learning:
                    intervales = new List <MonoObjectiveEOALib.Range>
                    {
                        new MonoObjectiveEOALib.Range("Activation Function", 0.8, 2.4),
                        new MonoObjectiveEOALib.Range("Alpha of Activation Function", 0.2, 5),
                        new MonoObjectiveEOALib.Range("Population size", 30, 120),
                        new MonoObjectiveEOALib.Range("C1", 0.05, 5),
                        new MonoObjectiveEOALib.Range("C2", 0.05, 5),
                        new MonoObjectiveEOALib.Range("C3", 0.05, 5),
                        new MonoObjectiveEOALib.Range("Learning Err", 0.01, 1),
                        new MonoObjectiveEOALib.Range("Max Iteration (Kmax)", MinLearningIterations, MaxLearningIterations),
                        new MonoObjectiveEOALib.Range("Hiden Layer Number", 1, 4),
                        new MonoObjectiveEOALib.Range("Layer 1 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 2 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 3 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 4 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        new MonoObjectiveEOALib.Range("Layer 5 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                        //new MonoObjectiveEOALib.Range("Layer 6 Nodes count", MinNeuronesCount, MaxNeuronesCount),
                        //new MonoObjectiveEOALib.Range("Layer 7 Nodes count", MinNeuronesCount, MaxNeuronesCount)
                    };
                    break;

                default:
                    throw new NotImplementedException();
                }

                //gsaOpim.Intervalles.Add(new MonoObjectiveEOALib.Range("Additional Params (Population zize for GA", 5, 20));

                if (optimAlgo == OptimizationAlogrithmEnum.GSA_Optimizer)
                {
                    throw new NotImplementedException();
                    //GsaOptim = new GSA_Optimizer();
                    // GsaOptim.Alpha = GSA_Alpha;
                    //GsaOptim.G0 = GSA_G0;
                    // GsaOptim.D_Dimensions = GetSearchSpaceDimension(Learning_Algorithm);
                    //GsaOptim.MaxIterations = MaxIterations ;
                    //GsaOptim.N_Agents = PopulationSize;
                    //GsaOptim.OptimizationType = RmosEngine.OptimizationTypeEnum.Minimization;
                    //GsaOptim.ElitistCheck = RmosEngine.ElitistCheckEnum.Equation21;

                    // Set intervalles :
                    //GsaOptim.Intervalles = intervales;
                }
                else if (optimAlgo == OptimizationAlogrithmEnum.GA_Optimizer)
                {
                    throw new NotImplementedException();

                    // GaOptim = new GAOptimizer();
                    // GaOptim.GenomesLenght = GetSearchSpaceDimension(this.Learning_Algorithm);
                    // GaOptim.InitialPopulation = this.PopulationSize;
                    // GaOptim.MaxIteration = this.MaxIterations; ;
                    // GaOptim.PopulationLimit = this.PopulationSize;
                    // GaOptim.DeathFitnessLimit = 10 ^ -6;
                    // GaOptim.Save_2_BestCrossover = true;
                    // GaOptim.Intervalles = intervales;
                    //GaOptim.MutationFrequency = 0.2f;
                }
                else if (optimAlgo == OptimizationAlogrithmEnum.HPSOGWO_Optimizer)
                {
                    throw new NotImplementedException();

                    //HpsoGwoOptim = new HybridPSOGWOptimizer();
                    //HpsoGwoOptim.C_1 = HPSOGWO_C1;
                    //HpsoGwoOptim.C_2 = HPSOGWO_C2;
                    //HpsoGwoOptim.C_3 = HPSOGWO_C3;
                    // HpsoGwoOptim.Dimensions = GetSearchSpaceDimension(this.Learning_Algorithm);
                    // HpsoGwoOptim.OptimizationType = RmosEngine.OptimizationTypeEnum.Minimization;
                    //HpsoGwoOptim.WolvesCount = PopulationSize;
                    // HpsoGwoOptim.Intervalles = intervales;
                    //HpsoGwoOptim.IterationsCount = MaxIterations;
                }
                else if (optimAlgo == OptimizationAlogrithmEnum.GWO__Optimizer)
                {
                    throw new NotImplementedException();
                    // GwoOptim = new GWOptimizer();
                    // GwoOptim.GWOVersion = GWOVersion;
                    // GwoOptim.IGWO_uParameter = IGWO_uParameter;
                    //GwoOptim.Dimensions= GetSearchSpaceDimension(this.Learning_Algorithm);
                    //GwoOptim.OptimizationType = RmosEngine.OptimizationTypeEnum.Minimization;
                    // GwoOptim.WolvesCount = PopulationSize;
                    //GwoOptim.Intervalles = intervales;
                    // GwoOptim.IterationsCount = MaxIterations;
                }
                //Initialize results series :
                LearningErr_Result       = new DataSerie1D();
                LearningIteration_Result = new DataSerie1D();
                Best_Chart    = new DataSerie1D();
                Best_Solution = new DataSerie1D();
            }
Example #24
        public void Formate(bool includeMonths, int startingMonth)
        {
            if (includeMonths == false)
            {
                Formate();
            }
            else
            {
                if (object.Equals(Data, null))
                {
                    return;
                }
                if (object.Equals(Data.Data, null))
                {
                    return;
                }
                int dataCount = Data.Count;
                if (dataCount < 1)
                {
                    return;
                }

                TrainingCount = (int)((TrainingRate * dataCount) / 100);
                TestingCount  = (dataCount - TrainingCount);

                mTrainingInputs = new DataSerieTD()
                {
                    Name = "Training inputs"
                };
                mTrainingOutputs = new DataSerie1D()
                {
                    Name = "Training outputs"
                };

                int monthIndex = startingMonth + mAnnInputLayerCount;

                double monthStand = 0;

                for (int i = 0; i < (TrainingCount - mAnnInputLayerCount); i++)
                {
                    double[] qj = new double[mAnnInputLayerCount + 1];

                    for (int j = 0; j < mAnnInputLayerCount; j++)
                    {
                        qj[j] = Data.Data[(i + j)].X_Value;
                    }

                    //------------------------------------
                    monthStand = (GetMonthIndex(monthIndex) * 0.9) / 12;
                    qj[mAnnInputLayerCount] = monthStand;
                    monthIndex += 1;
                    //------------------------------------
                    mTrainingInputs.Add(i.ToString(), qj);
                    mTrainingOutputs.Add(i.ToString(), Data.Data[(i + mAnnInputLayerCount)].X_Value);
                }

                mTestingInputs = new DataSerieTD()
                {
                    Name = "Testing inputs"
                };

                mTestingOutputs = new DataSerie1D()
                {
                    Name = "Testing outputs"
                };

                int k = TrainingCount;

                for (int i = 0; i < (TestingCount - mAnnInputLayerCount); i++)
                {
                    double[] qj = new double[mAnnInputLayerCount + 1];

                    for (int j = 0; j < mAnnInputLayerCount; j++)
                    {
                        qj[j] = Data.Data[(k + i + j)].X_Value;
                    }
                    //------------------------------------
                    monthStand = (GetMonthIndex(monthIndex) * 0.9) / 12;
                    qj[mAnnInputLayerCount] = monthStand;
                    monthIndex += 1;
                    //------------------------------------

                    mTestingInputs.Add((i + k).ToString(), qj);
                    mTestingOutputs.Add((i + k).ToString(), Data.Data[(k + i + mAnnInputLayerCount)].X_Value);
                }
            }
        }
Example #25
        public void FormateDefault()
        {
            if (object.Equals(Data, null))
            {
                return;
            }
            if (object.Equals(Data.Data, null))
            {
                return;
            }
            int dataCount = Data.Count;

            if (dataCount < 1)
            {
                return;
            }

            TrainingCount = (int)((TrainingRate * dataCount) / 100);
            TestingCount  = (dataCount - TrainingCount);

            mTrainingInputs = new DataSerieTD()
            {
                Name = "Training inputs"
            };
            mTrainingOutputs = new DataSerie1D()
            {
                Name = "Training outputs"
            };


            for (int i = 0; i < (TrainingCount - mAnnInputLayerCount); i++)
            {
                double[] qj = new double[mAnnInputLayerCount];

                for (int j = 0; j < mAnnInputLayerCount; j++)
                {
                    qj[j] = Data.Data[(i + j)].X_Value;
                }
                mTrainingInputs.Add(i.ToString(), qj);
                mTrainingOutputs.Add(i.ToString(), Data.Data[(i + mAnnInputLayerCount)].X_Value);
            }

            mTestingInputs = new DataSerieTD()
            {
                Name = "Testing inputs"
            };
            mTestingOutputs = new DataSerie1D()
            {
                Name = "Testing outputs"
            };
            int k = TrainingCount;

            for (int i = 0; i < (TestingCount - mAnnInputLayerCount); i++)
            {
                double[] qj = new double[mAnnInputLayerCount];
                for (int j = 0; j < mAnnInputLayerCount; j++)
                {
                    qj[j] = Data.Data[(k + i + j)].X_Value;
                }
                mTestingInputs.Add((i + k).ToString(), qj);
                mTestingOutputs.Add((i + k).ToString(), Data.Data[(k + i + mAnnInputLayerCount)].X_Value);
            }
        }
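
The two loops above slice the series into sliding-window patterns: with m = mAnnInputLayerCount, each training (or testing) sample pairs m consecutive values with the value that immediately follows them:

    (x_i, x_{i+1}, \dots, x_{i+m-1}) \;\mapsto\; x_{i+m}
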
Example #26
        /// <summary>
        /// Formats the Training and Testing (inputs-outputs) sets using specific time series indexes.
        /// </summary>
        /// <param name="annInputModel"> The ANN input model </param>
        private void Formate(DataSerie1D annInputModel)
        {
            try
            {
                if (object.Equals(Data, null))
                {
                    return;
                }
                if (object.Equals(Data.Data, null))
                {
                    return;
                }
                int dataCount = Data.Count;
                if (dataCount < 1)
                {
                    return;
                }

                if (object.Equals(annInputModel, null))
                {
                    throw new Exception("No time series index pattern was found.");
                }
                if (object.Equals(annInputModel.Data, null))
                {
                    throw new Exception("No time series index pattern was found.");
                }
                if (annInputModel.Count < 1)
                {
                    throw new Exception("No input model is specified.");
                }

                if (annInputModel.Max > dataCount)
                {
                    throw new Exception("Index greater than the data series count.");
                }

                TrainingCount = (int)((TrainingRate * dataCount) / 100);
                TestingCount  = (dataCount - TrainingCount);

                mTrainingInputs = new DataSerieTD()
                {
                    Name = "Training inputs"
                };
                mTrainingOutputs = new DataSerie1D()
                {
                    Name = "Training outputs"
                };

                int annInputCount = annInputModel.Count;

                int[] indexes = new int[annInputCount];

                for (int i = 0; i < annInputCount; i++)
                {
                    if (annInputModel.Data[i].X_Value > TrainingCount || annInputModel.Data[i].X_Value > TestingCount)
                    {
                        throw new Exception("ANN input indexes must be lower than the training and testing data series lengths.");
                    }
                    indexes[i] = (int)(annInputModel.Data[i].X_Value - 1);
                }

                int indexMax = (indexes.Max() - 1);

                for (int i = 0; i < (TrainingCount - indexMax); i++)
                {
                    double[] qj = new double[annInputCount];

                    for (int j = 0; j < annInputCount; j++)
                    {
                        qj[j] = Data.Data[(i + indexes[j])].X_Value;
                    }
                    mTrainingInputs.Add(i.ToString(), qj);
                    mTrainingOutputs.Add(i.ToString(), Data.Data[(i + indexMax)].X_Value);
                }

                mTestingInputs = new DataSerieTD()
                {
                    Name = "Testing inputs"
                };
                mTestingOutputs = new DataSerie1D()
                {
                    Name = "Testing outputs"
                };
                int k = TrainingCount;

                for (int i = 0; i < (TestingCount - indexMax); i++)
                {
                    double[] qj = new double[annInputCount];
                    for (int j = 0; j < annInputCount; j++)
                    {
                        qj[j] = Data.Data[(k + i + indexes[j])].X_Value;
                    }
                    mTestingInputs.Add((i + k).ToString(), qj);
                    mTestingOutputs.Add((i + k).ToString(), Data.Data[(k + i + indexMax)].X_Value);
                }
            }
            catch (Exception) { throw; }
        }
Example #27
            public void Compute()
            {
                try
                {                 // Step 1 : Standardize data and get input data;
                    StandardizeData(Obs_TrainingInputs);
                    StandardizeData(Obs_TrainingOutputs);
                    StandardizeData(mObs_TestingInputs);

                    mObs_Training_Inputs  = DataSerieTD.Convert(Obs_TrainingInputs);
                    mObs_Training_Outputs = Obs_TrainingOutputs.GetArray();

                    ANN_Input_Count  = mObs_Training_Inputs[0].Length;
                    ANN_Output_Count = mObs_Training_Outputs[0].Length;
                    //---------------Starting :----------------------------
                    Chronos.Start();
                    // Step 2 : Initialize optimizer :
                    Initialize();
                    //------------Optimization------------------
                    switch (optimAlgo)
                    {
                    case OptimizationAlogrithmEnum.GA_Optimizer:
                        throw new NotImplementedException();

                    case OptimizationAlogrithmEnum.GSA_Optimizer:
                        throw new NotImplementedException();

                    case OptimizationAlogrithmEnum.GWO__Optimizer:
                        throw new NotImplementedException();

                    case OptimizationAlogrithmEnum.HPSOGWO_Optimizer:
                        throw new NotImplementedException();

                        // Set Optimization function :
                        //HpsoGwoOptim.ObjectiveFunctionComputation += HpsoGwoOptim_ObjectiveFunctionComputation;
                        //Computation:
                        //HpsoGwoOptim.LuanchComputation();
                        //----------------------------------------------------------------------
                        //CopySolution(HpsoGwoOptim.BestSolution);

                        //for (int j = 0; j < HpsoGwoOptim.BestChart.Count; j++)
                        // {
                        //     Best_Chart.Add(j.ToString(), HpsoGwoOptim.BestChart[j]);
                        // }

                    }

                    Chronos.Stop();
                    mComputationDuration = Chronos.ElapsedMilliseconds;

                    //--Compute Training and Testing Outputs :
                    if (Equals(BestNeuralNetwork, null))
                    {
                        return;
                    }
                    if (Equals(this.Obs_Training_Inputs, null))
                    {
                        return;
                    }
                    if (Equals(this.mObs_Testing_Inputs, null))
                    {
                        return;
                    }

                    mComputed_TrainingOutputs = DataSerie1D.Convert(BestNeuralNetwork.Compute(this.Obs_Training_Inputs));
                    mComputed_Testing_Outputs = BestNeuralNetwork.Compute(this.mObs_Testing_Inputs);

                    //-----------------------------
                }
                catch (Exception) { throw; }
            }
Example #28
 public Statistics(double[] observed_Serie, double[] predicted_Serie)
 {
     ObservedSerie  = DataSerie1D.Convert(observed_Serie);
     PredictedSerie = DataSerie1D.Convert(predicted_Serie);
 }
Example #29
 public Statistics(DataSerie1D observed_Serie, DataSerie1D predicted_Serie)
 {
     this.ObservedSerie  = observed_Serie;
     this.PredictedSerie = predicted_Serie;
 }
Example #30
 public PerformanceMesure(DataSerie1D observed_Serie, DataSerie1D predicted_Serie)
 {
     this.ObservedSerie  = observed_Serie;
     this.PredictedSerie = predicted_Serie;
 }
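
A minimal sketch of how the metric methods from Examples #3 and #12-#14 could be combined with DataSerie1D. The excerpts above do not show which class exposes those static methods, so a hypothetical PerformanceStatistics holder is used here; DataSerie1D.Add(string, double) is assumed from the other examples.

 static void EvaluateForecastSketch()
 {
     // Build a small observed/predicted pair; the titles are arbitrary labels.
     DataSerie1D observed  = new DataSerie1D();
     DataSerie1D predicted = new DataSerie1D();

     observed.Add("t1", 1.0);  predicted.Add("t1", 0.9);
     observed.Add("t2", 2.0);  predicted.Add("t2", 2.2);
     observed.Add("t3", 3.0);  predicted.Add("t3", 2.8);

     // PerformanceStatistics is a hypothetical name for the class holding the
     // static metric methods shown in the examples above.
     double rmse = PerformanceStatistics.Compute_RMSE(observed, predicted);
     double mae  = PerformanceStatistics.Compute_MAE(observed, predicted);
     double nash = PerformanceStatistics.Compute_Nash_Sutcliffe_Efficiency(observed, predicted);

     Console.WriteLine("RMSE = {0}; MAE = {1}; NSE = {2}", rmse, mae, nash);
 }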