Example #1
0
        /// <summary>
        /// Trains the SVM forecasting model on the supplied data sets and writes
        /// a one-step forecast for every input vector into <c>datasets.ForecastedData</c>.
        /// </summary>
        /// <param name="datasets">Training data; its ForecastedData member is (re)allocated and filled.</param>
        public override void Train(IForecastingDataSets datasets)
        {
            // Interpret the generic model parameter as SVM training parameters.
            Parameter svmPara = mParameter as Parameter;

            OnStartRunning(new ComponentRunEventArgs(datasets));

            // Build the SVM problem representation from the raw data.
            double[] targets;
            int      maxIndex;
            Node[][] inputNodes = CreateNodes(datasets, out targets, out maxIndex);
            Problem  problem    = new Problem(inputNodes.Length, targets, inputNodes, maxIndex);

            // Determine the value range and scale the problem before training.
            mRange  = Scaling.DetermineRange(problem);
            problem = Scaling.Scale(mRange, problem);

            TrainedModel = Training.Train(problem, svmPara as Parameter);

            // Forecast every input vector with the freshly trained model.
            datasets.ForecastedData = new double[datasets.InputData.Length][];
            for (int index = 0; index < datasets.InputData.Length; index++)
            {
                datasets.ForecastedData[index] = new double[] { Forecast(datasets.InputData[index]) };
                OnRunningEpoch(new ComponentRunEpochEventArgs(index));
            }

            // Record support-vector statistics on the parameter object.
            svmPara.Count      = TrainedModel.SupportVectorCount;
            svmPara.Percentage = TrainedModel.SupportVectorCount / (double)problem.Count;

            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
        /// <summary>
        /// Splits the schema's [Start, End] span into one window per calendar year
        /// (same month/day bounds each year), runs a rolling selection on each window,
        /// and merges the per-year results into a single data set.
        /// </summary>
        /// <param name="schema">Selection schema; its InstancesCount is updated with the merged row count.</param>
        /// <returns>The merged forecasting data sets across all covered years.</returns>
        public IForecastingDataSets Select(IPredicationSchema schema)
        {
            DateTime first = schema.Start;
            DateTime last  = schema.End;

            int yearCount = last.Year - first.Year + 1;
            IForecastingDataSets[] yearlySets = new IForecastingDataSets[yearCount];

            for (int offset = 0; offset < yearCount; offset++)
            {
                int year = first.Year + offset;
                // Clone the schema but restrict it to this single year's window.
                PredicationSchema yearSchema = new PredicationSchema(schema.Stimulus, schema.Responses)
                {
                    Start      = new DateTime(year, first.Month, first.Day),
                    End        = new DateTime(year, last.Month, last.Day),
                    TimePeriod = schema.TimePeriod
                };
                yearlySets[offset] = new RollingSelection().Select(yearSchema);
            }

            ForecastingDataSets merged = ForecastingDataSets.Merge(yearlySets);
            schema.InstancesCount = merged.InputData.Length;
            return merged;
        }
Example #3
0
        /// <summary>
        /// Trains a three-layer back-propagation network (linear input, one sigmoid
        /// hidden layer, single sigmoid output) and fills <c>datasets.ForecastedData</c>
        /// with a one-value forecast per input vector.
        /// </summary>
        /// <param name="datasets">Training data; its ForecastedData member is (re)allocated and filled.</param>
        public override void Train(IForecastingDataSets datasets)
        {
            OnStartRunning(new ComponentRunEventArgs(datasets));
            AnnModelParameter annPara = mParameter as AnnModelParameter;

            // Topology: one input neuron per feature, configured hidden size, single output.
            LinearLayer  inLayer     = new LinearLayer(datasets.InputData[0].Length);
            SigmoidLayer hiddenLayer = new SigmoidLayer(annPara.HiddenNeuronsCount[0]);
            SigmoidLayer outLayer    = new SigmoidLayer(1);

            // Connectors register themselves on the layers they join; random initial weights in [0, 0.3).
            BackpropagationConnector inToHidden  = new BackpropagationConnector(inLayer, hiddenLayer);
            BackpropagationConnector hiddenToOut = new BackpropagationConnector(hiddenLayer, outLayer);
            inToHidden.Initializer  = new RandomFunction(0d, 0.3d);
            hiddenToOut.Initializer = new RandomFunction(0d, 0.3d);

            network = new BackpropagationNetwork(inLayer, outLayer);
            network.SetLearningRate(annPara.LearningRate);
            network.JitterEpoch      = annPara.JitterEpoch;
            network.JitterNoiseLimit = annPara.JitterNoiseLimit;
            network.EndEpochEvent += delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                // TODO: training error needs to be calculated
                OnRunningEpoch(new AnnModelRunEpochEventArgs(args.TrainingIteration + 1, 0));
            };

            network.Learn(ForecastingDataSets.ConvertToTrainingSet(datasets), annPara.Iterations);

            // Produce a forecast for every training input.
            datasets.ForecastedData = new double[datasets.InputData.Length][];
            for (int sample = 0; sample < datasets.InputData.Length; sample++)
            {
                datasets.ForecastedData[sample] = new double[] { Forecast(datasets.InputData[sample]) };
            }
            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
Example #4
0
 /// <summary>
 /// Recursively walks the rule tree; interior rules only recurse into their
 /// children, while each leaf rule gets its own MLR model trained on the
 /// subset of rows that satisfy the rule's inherited split conditions.
 /// </summary>
 /// <param name="rule">Current node of the rule tree.</param>
 /// <param name="datasets">Full data set from which leaf subsets are extracted.</param>
 private void BuildRuleTree(Rule rule, IForecastingDataSets datasets)
 {
     if (rule.Type != RuleType.Interior)
     {
         // Leaf: train a regression model on the rows matched by this rule's splits.
         IForecastingDataSets subset = ExtractSubSets(rule.InheritedSplitLocations.ToArray(),
                                                      rule.InheritedSplitValues.ToArray(), datasets, rule.Type);
         ModelParameter mp = new ModelParameter();
         if (subset.Length > 0)
         {
             MLRModel leafModel = new MLRModel(mp);
             leafModel.Train(subset);
             rule.Model = leafModel;
         }
         return;
     }
     // Interior: recurse into whichever children exist.
     if (rule.LeftChild != null)
     {
         BuildRuleTree(rule.LeftChild, datasets);
     }
     if (rule.RightChild != null)
     {
         BuildRuleTree(rule.RightChild, datasets);
     }
 }
Example #5
0
        /// <summary>
        /// Trains a Kohonen self-organizing map on the unsupervised input vectors,
        /// raising start/epoch/finish events around the learning run.
        /// </summary>
        /// <param name="datasets">Data whose input vectors form the unsupervised training set.</param>
        public void Analyze(IForecastingDataSets datasets)
        {
            if (ModelStartRunning != null)
            {
                ModelStartRunning(this, new ComponentRunEventArgs(datasets));
            }

            // Initial neighbourhood radius: half of the larger map dimension.
            // NOTE(review): computed but not used in this method — confirm intent.
            int learningRadius = Math.Max(mSOMParameter.LayerWidth, mSOMParameter.LayerHeight) / 2;

            KohonenLayer input = new KohonenLayer(datasets.InputData[0].Length);
            KohonenLayer map   = new KohonenLayer(
                new Size(mSOMParameter.LayerWidth, mSOMParameter.LayerHeight),
                mSOMParameter.NeighborhoodFunction,
                mSOMParameter.Topology);

            KohonenConnector link = new KohonenConnector(input, map);
            link.Initializer = new RandomFunction(0, 100);

            map.SetLearningRate(mSOMParameter.LearningRate, mSOMParameter.FinalLearningRate);
            map.IsRowCircular    = mSOMParameter.IsRowCircular;
            map.IsColumnCircular = mSOMParameter.IsColumnCircular;

            mNetwork = new KohonenNetwork(input, map);
            mNetwork.EndEpochEvent += delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                if (ModelRunningEpoch != null)
                {
                    ModelRunningEpoch(this, new ComponentRunEpochEventArgs(args.TrainingIteration));
                }
            };

            mTrainingSet = ForecastingDataSets.ConvertToUnSupervisedTrainingSet(datasets);
            mNetwork.Learn(mTrainingSet, mSOMParameter.Iterations);

            if (ModelFinishRunning != null)
            {
                ModelFinishRunning(this, new ComponentRunEventArgs(datasets));
            }
        }
Example #6
0
        /// <summary>
        /// Trains the genetic-programming model: builds the function and terminal
        /// sets, evolves a population until the stop condition holds, then evaluates
        /// the best chromosome over all rows to fill <c>datasets.ForecastedData</c>.
        /// </summary>
        /// <param name="datasets">Training data; its ForecastedData member is (re)allocated and filled.</param>
        public override void Train(IForecastingDataSets datasets)
        {
            OnStartRunning(new ComponentRunEventArgs(datasets));
            NumberOfVariables = datasets.InputVectorLength;
            NumberOfSamples   = datasets.Length;
            EnvolutionStep    = 0;
            // Lazily create the function set; reused across repeated Train calls.
            if (functionSet == null)
            {
                functionSet = new GPFunctionSet();
            }
            Initialize();
            GenerateFunction();
            // Random constants drawn from the configured interval become extra terminals.
            double [] gpConstraints = GenerateConstants(mGPModelParameter.ConstantsIntervalFrom, mGPModelParameter.ConstantsIntervalTo, mGPModelParameter.ConstantsNumber);
            GenerateTerminals(datasets, gpConstraints);

            // Lazily create the population as well, so evolution can resume on a later call.
            if (population == null)
            {
                EnvolutionStep = 1;
                population     = new GPPopulation(mGPModelParameter.PopulationSize, terminalSet, functionSet, parameters, mGPModelParameter.MultipleCore);
            }
            GPBestHromosome = population.bestChromosome;

            // Evolve until the configured indicator/condition says to stop.
            while (ProveEnvolution(EnvolutionStep, mGPModelParameter.EnvolveConditionValue, mGPModelParameter.EnvolveIndicator))
            {
                population.StartEvolution();
                OnRunningEpoch(new ComponentRunEpochEventArgs(EnvolutionStep));
                EnvolutionStep++;
            }

            // Flatten the best chromosome into a postfix/list expression for evaluation.
            int        indexOutput = terminalSet.NumConstants + terminalSet.NumVariables - 1;
            List <int> lst         = new List <int>();

            FunctionTree.ToListExpression(lst, GPBestHromosome.Root);
            double y = 0;

            // NOTE(review): RowCount is set from NumberOfSamples in GenerateTerminals,
            // so it should equal datasets.Length here — confirm if that invariant holds.
            datasets.ForecastedData = new double[datasets.Length][];
            for (int i = 0; i < terminalSet.RowCount; i++)
            {
                // evaluate the function on row i
                y = functionSet.Evaluate(lst, terminalSet, i);

                // guard against NaN/Infinity from unprotected operators (e.g. division)
                if (double.IsNaN(y) || double.IsInfinity(y))
                {
                    y = 0;
                }
                datasets.ForecastedData[i]    = new double[1];
                datasets.ForecastedData[i][0] = y;
            }
            // The decoded expression string of the winning chromosome travels in State.
            OnFinishRunning(new ComponentRunEventArgs(datasets)
            {
                State = functionSet.DecodeExpression(lst, terminalSet)
            });
        }
        /// <summary>
        /// Converts the input vectors of a forecasting data set into an
        /// unsupervised training set (samples carry no target outputs).
        /// </summary>
        /// <param name="sets">Source data; only InputData is read.</param>
        /// <returns>A training set with one sample per input vector, in order.</returns>
        public static TrainingSet ConvertToUnSupervisedTrainingSet(IForecastingDataSets sets)
        {
            TrainingSet result = new TrainingSet(sets.InputData[0].Length);
            foreach (double[] vector in sets.InputData)
            {
                result.Add(new TrainingSample(vector));
            }
            return result;
        }
Example #8
0
        /// <summary>
        /// Trains the multiple-linear-regression model by forecasting every input
        /// vector and storing the results in <c>datasets.ForecastedData</c>.
        /// </summary>
        /// <param name="datasets">Training data; its ForecastedData member is (re)allocated and filled.</param>
        public override void Train(IForecastingDataSets datasets)
        {
            OnStartRunning(new ComponentRunEventArgs(datasets));

            // Read inputs through the interface directly (the previous unchecked
            // `datasets as ForecastingDataSets` cast could yield null and throw),
            // matching how the sibling Train overrides access InputData.
            int solutionSize = datasets.Length;

            datasets.ForecastedData = new double[solutionSize][];
            for (int i = 0; i < solutionSize; i++)
            {
                datasets.ForecastedData[i]    = new double[1];
                datasets.ForecastedData[i][0] = Forecast(datasets.InputData[i]);
            }
            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
Example #9
0
        /// <summary>
        /// Builds the GP terminal set's training table from the data sets.
        /// Each row is laid out as: [input variables | random constants | output value].
        /// </summary>
        /// <param name="datasets">Source of input variables and the output column.</param>
        /// <param name="gpConstants">Pre-generated constants; regenerated here when null.</param>
        private void GenerateTerminals(IForecastingDataSets datasets, double[] gpConstants)
        {
            // Lazily create the terminal set; reused across repeated calls.
            if (terminalSet == null)
            {
                terminalSet = new GPTerminalSet();
            }

            if (gpConstants == null)
            {
                gpConstants = GenerateConstants(mGPModelParameter.ConstantsIntervalFrom, mGPModelParameter.ConstantsIntervalTo, mGPModelParameter.ConstantsNumber);
            }

            // Once the number of constants and the experiment data are known, the table can be filled in.
            terminalSet.NumConstants = mGPModelParameter.ConstantsNumber;
            terminalSet.NumVariables = NumberOfVariables;
            terminalSet.RowCount     = NumberOfSamples;

            terminalSet.TrainingData = new double[terminalSet.RowCount][];
            int numOfVariables = terminalSet.NumVariables + terminalSet.NumConstants + 1 /*Output Value of experiment*/;

            for (int i = 0; i < terminalSet.RowCount; i++)
            {
                terminalSet.TrainingData[i] = new double[numOfVariables];
                for (int j = 0; j < numOfVariables; j++)
                {
                    if (j < terminalSet.NumVariables)
                    {
                        // Columns [0, NumVariables): copy the input features for row i.
                        terminalSet.TrainingData[i][j] = datasets.InputData[i][j];
                    }
                    else if (j >= terminalSet.NumVariables && j < numOfVariables - 1)
                    {
                        // Columns [NumVariables, NumVariables+NumConstants): the shared constants,
                        // identical for every row.
                        terminalSet.TrainingData[i][j] = gpConstants[j - terminalSet.NumVariables];
                    }
                    else
                    {
                        // Last column: the target/output value of row i.
                        terminalSet.TrainingData[i][j] = datasets.OutputData[i][0];
                    }
                }
            }
            // Derived statistics (e.g. for fitness scaling) over the freshly filled table.
            terminalSet.CalculateStat();

            TerminateExperiments();
        }
Example #10
0
        /// <summary>
        /// Trains the hybrid model: builds the rule tree from the root rule, then
        /// forecasts each input by routing it to its leaf rule's trained model.
        /// Does nothing (beyond the start/finish events) when the root is not interior.
        /// </summary>
        /// <param name="datasets">Training data; its ForecastedData member is (re)allocated and filled.</param>
        public override void Train(IForecastingDataSets datasets)
        {
            HybridModelParameter para = mParameter as HybridModelParameter;

            OnStartRunning(new ComponentRunEventArgs(datasets));
            if (para.Rule.Type == RuleType.Interior)
            {
                BuildRuleTree(para.Rule, datasets);
                datasets.ForecastedData = new double[datasets.InputData.Length][];
                for (int i = 0; i < datasets.InputData.Length; i++)
                {
                    datasets.ForecastedData[i] = new double[1];
                    // `para` already holds the cast parameter — the previous code
                    // redundantly re-cast mParameter on every iteration.
                    // Route the sample to its leaf rule, then forecast with that leaf's model.
                    para.CurrentLeafRule          = LocateRule(datasets.InputData[i], para.Rule);
                    datasets.ForecastedData[i][0] = Forecast(datasets.InputData[i]);
                    OnRunningEpoch(new ComponentRunEpochEventArgs(i));
                }
            }
            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
Example #11
0
        /// <summary>
        /// Converts the data sets into the SVM node representation: one Node per
        /// feature (1-based Index), one row per sample.
        /// </summary>
        /// <param name="datasets">Source data; InputData supplies features, OutputData[r][0] the targets.</param>
        /// <param name="yvalue">Receives the target value of each row.</param>
        /// <param name="maxIndex">Receives the feature count (largest node index).</param>
        /// <returns>The jagged node matrix, one inner array per sample.</returns>
        private Node[][] CreateNodes(IForecastingDataSets datasets, out double[] yvalue, out int maxIndex)
        {
            int sampleCount  = datasets.InputData.Length;
            int featureCount = datasets.InputData[0].Length;

            maxIndex = featureCount;
            yvalue   = new double[sampleCount];

            Node[][] nodes = new Node[sampleCount][];
            for (int row = 0; row < sampleCount; row++)
            {
                yvalue[row] = datasets.OutputData[row][0];

                Node[] sample = new Node[featureCount];
                for (int col = 0; col < featureCount; col++)
                {
                    // SVM node indices are 1-based.
                    sample[col] = new Node { Index = col + 1, Value = datasets.InputData[row][col] };
                }
                nodes[row] = sample;
            }
            return nodes;
        }
Example #12
0
        /// <summary>
        /// Extracts the rows of <paramref name="datasets"/> that satisfy every split
        /// condition: for a left leaf each indexed value must be &lt;= its threshold,
        /// for a right leaf it must be &gt; its threshold.
        /// </summary>
        /// <param name="splitLocations">Feature indices to test, parallel to splitValues.</param>
        /// <param name="splitValues">Threshold per split location.</param>
        /// <param name="datasets">Source data (inputs, outputs, dates).</param>
        /// <param name="type">Leaf side deciding the comparison direction.</param>
        /// <returns>A new data set containing only the matching rows, dates included.</returns>
        private IForecastingDataSets ExtractSubSets(int[] splitLocations, double[] splitValues, IForecastingDataSets datasets, RuleType type)
        {
            List <double[]> inputs  = new List <double[]>();
            List <double[]> outputs = new List <double[]>();
            List <DateTime> stamps  = new List <DateTime>();

            for (int i = 0; i < datasets.Length; i++)
            {
                double[] vector    = datasets.InputData[i];
                bool     satisfies = true;

                // Check every split condition; stop early once one fails.
                for (int k = 0; k < splitLocations.Length && satisfies; k++)
                {
                    double value     = vector[splitLocations[k]];
                    double threshold = splitValues[k];
                    if (type == RuleType.LeftLeaf && value > threshold)
                    {
                        satisfies = false;
                    }
                    else if (type == RuleType.RightLeaf && value <= threshold)
                    {
                        satisfies = false;
                    }
                }

                if (satisfies)
                {
                    inputs.Add(vector);
                    outputs.Add(datasets.OutputData[i]);
                    stamps.Add(datasets.Date[i]);
                }
            }

            IForecastingDataSets result = new ForecastingDataSets(inputs.ToArray(), outputs.ToArray());
            result.Date = stamps.ToArray();
            return result;
        }
Example #13
0
 /// <summary>Trains the model on the given data sets; implemented by each concrete model.</summary>
 public abstract void Train(IForecastingDataSets datasets);
 /// <summary>Creates run event arguments carrying the data sets involved in the run.</summary>
 /// <param name="datasets">The data sets the component is operating on.</param>
 public ComponentRunEventArgs(IForecastingDataSets datasets)
 {
     mIForecastingDataSets = datasets;
 }
Example #15
0
        /// <summary>
        /// Trains an AForge.NET back-propagation network on the data sets, looping
        /// epochs until the iteration cap or the desired learning error is reached,
        /// then snapshots the weights of the first two layers into LayerWeightCollection.
        /// </summary>
        /// <param name="datasets">Training data; its ForecastedData member is (re)allocated and refreshed every epoch.</param>
        public void Train(IForecastingDataSets datasets)
        {
            NeedToStop = false;
            if (ModelStartRunning != null)
            {
                ModelStartRunning(this, new ComponentRunEventArgs(datasets));
            }
            // Pick the hidden-layer activation from the FANN-style enum;
            // bipolar sigmoid is the fallback for unmapped values.
            IActivationFunction actFunc = null;
            double sigmoidAlphaValue    = mAnnModelParameter.SigmoidAlphaValue;

            if (mAnnModelParameter.HiddenActivationFunction == fann_activationfunc_enum.FANN_SIGMOID_SYMMETRIC)
            {
                actFunc = new BipolarSigmoidFunction(sigmoidAlphaValue);
            }
            else if (mAnnModelParameter.HiddenActivationFunction == fann_activationfunc_enum.FANN_SIGMOID)
            {
                actFunc = new SigmoidFunction(sigmoidAlphaValue);
            }
            else if (mAnnModelParameter.HiddenActivationFunction == fann_activationfunc_enum.FANN_THRESHOLD)
            {
                actFunc = new ThresholdFunction();
            }
            else
            {
                actFunc = new BipolarSigmoidFunction(sigmoidAlphaValue);
            }

            // Derive layer sizes from the data shape.
            mAnnModelParameter.InputNeuronCount  = datasets.InputData[0].Length;
            mAnnModelParameter.OutputNeuronCount = datasets.OutputData[0].Length;
            int inputsCount  = mAnnModelParameter.InputNeuronCount;
            int outputsCount = mAnnModelParameter.OutputNeuronCount;

            // mAnnModelParameter.HiddenNeuronsCount = new int[1];
            //      mAnnModelParameter.HiddenNeuronsCount[0] = datasets.InputData[0].Length * 2 + 1;
            // NOTE(review): HiddenCount is forced to 1 here even though
            // HiddenNeuronsCount may describe more layers — confirm intent.
            mAnnModelParameter.HiddenCount = 1;

            // neuronsCount = [hidden layer sizes..., output layer size].
            int[] neuronsCount = new int[mAnnModelParameter.HiddenNeuronsCount.Length + 1];
            for (int i = 0; i < mAnnModelParameter.HiddenNeuronsCount.Length; i++)
            {
                neuronsCount[i] = mAnnModelParameter.HiddenNeuronsCount[i];
            }
            neuronsCount[mAnnModelParameter.HiddenNeuronsCount.Length] = outputsCount;

            mNetwork = new ActivationNetwork(actFunc, inputsCount, neuronsCount);
            BackPropagationLearning teacher = new BackPropagationLearning(mNetwork);
            ActivationLayer         layer   = mNetwork[0];

            teacher.LearningRate = mAnnModelParameter.LearningRate;
            teacher.Momentum     = mAnnModelParameter.LearningMomentum;

            List <double> arError      = new List <double>();
            int           solutionSize = datasets.InputData.Length;

            datasets.ForecastedData = new double[solutionSize][];
            int iteration = 1;

            // Epoch loop: train, refresh forecasts, accumulate errors, test stop conditions.
            while (!mNeedToStop)
            {
                double error = teacher.RunEpoch(datasets.InputData, datasets.OutputData);
                arError.Add(error);

                double learningError   = 0.0;
                double predictionError = 0.0;

                for (int i = 0, n = solutionSize; i < n; i++)
                {
                    datasets.ForecastedData[i] = (double[])mNetwork.Compute(datasets.InputData[i]).Clone();

                    // The trailing MaximumWindowSize samples count toward prediction
                    // error; everything before them toward learning error.
                    if (i >= n - mAnnModelParameter.MaximumWindowSize)
                    {
                        predictionError += Math.Abs(datasets.OutputData[i][0] - datasets.ForecastedData[i][0]);
                    }
                    else
                    {
                        learningError += Math.Abs(datasets.OutputData[i][0] - datasets.ForecastedData[i][0]);
                    }
                }
                // Stop when either the iteration cap or the desired error is reached.
                if (iteration >= mAnnModelParameter.Iterations)
                {
                    NeedToStop = true;
                }
                if (learningError <= mAnnModelParameter.DesiredError)
                {
                    NeedToStop = true;
                }
                if (ModelRunningEpoch != null)
                {
                    ModelRunningEpoch(this, new AnnModelRunEpochEventArgs(iteration, error));
                }
                iteration++;
            }

            // Snapshot weights of layer 0 (input->hidden) and layer 1 (hidden->output).
            LayerWeightCollection = new LayerWeight[mNetwork.LayersCount];

            LayerWeightCollection[0].Weight      = new double[layer.NeuronsCount][];
            LayerWeightCollection[0].ThreashHold = new double[layer.NeuronsCount][];
            for (int i = 0; i < layer.NeuronsCount; i++)
            {
                LayerWeightCollection[0].Weight[i]      = new double[layer.InputsCount];
                LayerWeightCollection[0].ThreashHold[i] = new double[layer.InputsCount];
                for (int j = 0; j < layer.InputsCount; j++)
                {
                    // NOTE(review): ThreashHold receives the same value as Weight
                    // (layer[i][j]) rather than a neuron threshold — confirm this
                    // is intentional and not a copy/paste slip.
                    LayerWeightCollection[0].Weight[i][j]      = layer[i][j];
                    LayerWeightCollection[0].ThreashHold[i][j] = layer[i][j];
                }
            }

            layer = mNetwork[1];
            LayerWeightCollection[1].Weight      = new double[layer.NeuronsCount][];
            LayerWeightCollection[1].ThreashHold = new double[layer.NeuronsCount][];
            for (int i = 0; i < layer.NeuronsCount; i++)
            {
                LayerWeightCollection[1].Weight[i]      = new double[layer.InputsCount];
                LayerWeightCollection[1].ThreashHold[i] = new double[layer.InputsCount];
                for (int j = 0; j < layer.InputsCount; j++)
                {
                    // NOTE(review): same Weight/ThreashHold duplication as layer 0.
                    LayerWeightCollection[1].Weight[i][j]      = layer[i][j];
                    LayerWeightCollection[1].ThreashHold[i][j] = layer[i][j];
                }
            }

            if (ModelFinishRunning != null)
            {
                ModelFinishRunning(this, new ComponentRunEventArgs(datasets));
            }
        }