Inheritance: INeuralData
        /// <summary>
        /// Generate the input to the neural network to predict. It will look at the current
        /// date as well as the number of days leading up to it, as specified by EvalWindow.
        /// This method is used both internally and externally.  
        /// </summary>
        /// <param name="marketData">The market data to use.</param>
        /// <param name="marketDataIndex">The point that we want to predict from.</param>
        /// <returns>The neural data generated, or null if no candlestick patterns were found in the window.</returns>
        public INeuralData CreateData(
            List<LoadedMarketData> marketData,
            int marketDataIndex)
        {
            INeuralData neuralData = new BasicNeuralData(14);
            int totalPatterns = 0;
            var patternCount = new int[14];

            for (var i = 0; i < EvalWindow; i++)
            {
                var data = marketData[(marketDataIndex-EvalWindow) + i];

                var candle = new IdentifyCandleStick();
                candle.SetStats(data);
                var pattern = candle.DeterminePattern();
                if (pattern == IdentifyCandleStick.UNKNOWN) continue;
                totalPatterns++;
                patternCount[pattern]++;
            }

            if (totalPatterns == 0)
                return null;

            for (var i = 0; i < 14; i++)
            {
                neuralData[i] = patternCount[i] / ((double)totalPatterns);
            }

            return neuralData;
        }
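For context, a minimal usage sketch (not part of the original listing): it assumes the surrounding class has EvalWindow configured and that a List<LoadedMarketData> has already been loaded and sorted by date; ExampleBuildInput is a hypothetical helper name.
        // Hedged sketch: 'this' is the class that declares CreateData above,
        // with EvalWindow already set; marketData is pre-loaded and date-ordered.
        public INeuralData ExampleBuildInput(List<LoadedMarketData> marketData)
        {
            // Predict from the most recent bar; any index >= EvalWindow works.
            int lastIndex = marketData.Count - 1;

            INeuralData input = CreateData(marketData, lastIndex);

            // Null means no recognizable candlestick pattern appeared in the window;
            // otherwise the 14 values are pattern frequencies that sum to 1.0.
            return input;
        }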
        /// <summary>
        /// Construct a data set from an input and ideal array.
        /// </summary>
        /// <param name="input">The input into the neural network for training.</param>
        /// <param name="ideal">The idea into the neural network for training.</param>
        public BasicNeuralDataSet(double[][] input, double[][] ideal)
        {
            for (int i = 0; i < input.Length; i++)
            {
                double[] tempInput = new double[input[0].Length];
                double[] tempIdeal = null;

                for (int j = 0; j < tempInput.Length; j++)
                {
                    tempInput[j] = input[i][j];
                }

                BasicNeuralData idealData = null;

                if (ideal != null)
                {
                    tempIdeal = new double[ideal[0].Length];
                    for (int j = 0; j < tempIdeal.Length; j++)
                    {
                        tempIdeal[j] = ideal[i][j];
                    }
                    idealData = new BasicNeuralData(tempIdeal);
                }

                BasicNeuralData inputData = new BasicNeuralData(tempInput);

                this.Add(inputData, idealData);
            }
        }
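As a quick illustration of this constructor (a sketch, not from the original source), the XOR truth table fits the two jagged arrays it expects; the resulting set could then be handed to a trainer such as the Backpropagation class used further down this page.
        // Hedged sketch: build a small supervised data set for XOR.
        double[][] xorInput =
        {
            new[] { 0.0, 0.0 },
            new[] { 0.0, 1.0 },
            new[] { 1.0, 0.0 },
            new[] { 1.0, 1.0 }
        };

        double[][] xorIdeal =
        {
            new[] { 0.0 },
            new[] { 1.0 },
            new[] { 1.0 },
            new[] { 0.0 }
        };

        // Row i of xorInput is paired with row i of xorIdeal.
        INeuralDataSet trainingSet = new BasicNeuralDataSet(xorInput, xorIdeal);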
        /// <summary>
        /// Generate a random training set. 
        /// </summary>
        /// <param name="seed">The seed value to use, the same seed value will always produce
        /// the same results.</param>
        /// <param name="count">How many training items to generate.</param>
        /// <param name="inputCount">How many input numbers.</param>
        /// <param name="idealCount">How many ideal numbers.</param>
        /// <param name="min">The minimum random number.</param>
        /// <param name="max">The maximum random number.</param>
        /// <returns>The random training set.</returns>
        public static BasicNeuralDataSet Generate(long seed,
                int count, int inputCount,
                int idealCount, double min, double max)
        {

            LinearCongruentialGenerator rand =
                new LinearCongruentialGenerator(seed);

            BasicNeuralDataSet result = new BasicNeuralDataSet();
            for (int i = 0; i < count; i++)
            {
                INeuralData inputData = new BasicNeuralData(inputCount);

                for (int j = 0; j < inputCount; j++)
                {
                    inputData.Data[j] = rand.Range(min, max);
                }

                INeuralData idealData = new BasicNeuralData(idealCount);

                for (int j = 0; j < idealCount; j++)
                {
                    idealData[j] = rand.Range(min, max);
                }

                BasicNeuralDataPair pair = new BasicNeuralDataPair(inputData,
                        idealData);
                result.Add(pair);

            }
            return result;
        }
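A short hedged sketch of calling this factory method; the declaring class is not shown on this page, so the call is written as if made from inside it, and the argument values are arbitrary.
        // Hedged sketch: 100 reproducible random pairs with 5 inputs and
        // 1 ideal value each, drawn from the range [-1, 1].  The same seed
        // always yields the same data set.
        BasicNeuralDataSet randomSet = Generate(1000L, 100, 5, 1, -1.0, 1.0);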
        /// <summary>
        /// Handle reading an item tag.
        /// </summary>
        /// <param name="xmlIn">The XML reader.</param>
        private void HandleItem(ReadXML xmlIn)
        {
            IDictionary<String, String> properties = xmlIn.ReadPropertyBlock();
            INeuralDataPair pair = null;
            INeuralData input = new BasicNeuralData(NumberList
                   .FromList(CSVFormat.EG_FORMAT, properties
                           [BasicNeuralDataSetPersistor.TAG_INPUT]));

            if (properties.ContainsKey(BasicNeuralDataSetPersistor.TAG_IDEAL))
            {
                // supervised
                INeuralData ideal = new BasicNeuralData(NumberList
                       .FromList(CSVFormat.EG_FORMAT, properties
                               [BasicNeuralDataSetPersistor.TAG_IDEAL]));
                pair = new BasicNeuralDataPair(input, ideal);
            }
            else
            {
                // unsupervised
                pair = new BasicNeuralDataPair(input);
            }

            this.currentDataSet.Add(pair);
        }
 /// <inheritdoc/>
 public void Compute(double[] input, double[] output)
 {
     BasicNeuralData input2 = new BasicNeuralData(input);
     INeuralData output2 = this.Compute(input2);
     EngineArray.ArrayCopy(output2.Data, output);
 }
        /// <summary>
        /// Compute the output for the given input.
        /// </summary>
        /// <param name="input">The input to the SVM.</param>
        /// <returns>The results from the SVM.</returns>
        public override INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(this.outputCount);

            svm_node[] formattedInput = MakeSparse(input);

            for (int i = 0; i < this.outputCount; i++)
            {
                double d = svm.svm_predict(this.models[i], formattedInput);
                result[i] = d;
            }
            return result;
        }
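For orientation, a hedged sketch of calling this override; svmNetwork stands in for an already-trained instance of the declaring class (a name invented here), and the input length is assumed to match the network's input count.
        // Hedged sketch: query an already-trained instance ('svmNetwork' is hypothetical).
        INeuralData svmInput = new BasicNeuralData(new[] { 0.25, 0.75, 0.5 });
        INeuralData svmOutput = svmNetwork.Compute(svmInput);

        for (int i = 0; i < svmOutput.Count; i++)
        {
            Console.WriteLine("Output {0}: {1}", i, svmOutput[i]);
        }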
Example #7
 public virtual BasicNeuralData GenerateInputNeuralData(int index)
 {
     if ((index + this._xc278386c02fd6e51) > this._x6fa2570084b2ad39.Count)
     {
         throw new TemporalError("Can't generate input temporal data beyond the end of provided data.");
     }

     BasicNeuralData data = new BasicNeuralData(this._x57202a8751db8895);
     int resultIndex = 0;

     // Gather one value per input description for each bar in the input window.
     for (int windowOffset = 0; windowOffset < this._xc278386c02fd6e51; windowOffset++)
     {
         foreach (TemporalDataDescription description in this._x6849a3dfb0329317)
         {
             if (description.IsInput)
             {
                 data[resultIndex++] = this.x8c7fc30b887213d1(description, index + windowOffset);
             }
         }
     }

     return data;
 }
        /// <summary>
        /// Search for a solution to the neural network model.
        /// </summary>
        private void searchSolution()
        {
            // Normalize Data
            switch (this.selectedActivationFunction)
            {
                case ActivationFunctionEnumeration.SemiLinearFunction:
                    this.activationFunction = new SemiLinearFunction();
                    this.normalizeData(0.1, 0.9);
                    break;
                case ActivationFunctionEnumeration.SigmoidFunction:
                    this.activationFunction = new SigmoidFunction();
                    this.normalizeData(0.1, 0.9);
                    break;
                case ActivationFunctionEnumeration.BipolarSigmoidFunction:
                    this.activationFunction = new BipolarSigmoidFunction();
                    this.normalizeData(-0.9, 0.9);
                    break;
                case ActivationFunctionEnumeration.HyperbolicTangentFunction:
                    this.activationFunction = new HyperbolicTangentFunction();
                    this.normalizeData(-0.9, 0.9);
                    break;
                default:
                    this.activationFunction = new BipolarSigmoidFunction();
                    this.normalizeData(-0.9, 0.9);
                    break;
            }

            //create network
            this.network = new BasicNetwork();
            this.network.AddLayer(new FeedforwardLayer(this.activationFunction, this.inputLayerNeurons));
            this.network.AddLayer(new FeedforwardLayer(this.activationFunction, this.hiddenLayerNeurons));
            this.network.AddLayer(new FeedforwardLayer(this.activationFunction, this.outputLayerNeurons));
            this.network.Reset();

            //variable for looping
            //needToStop = false;
            double mse = 0.0, error = 0.0, mae=0.0;
            int iteration = 1;

            // parameters
            double msle = 0.0, mspe = 0.0, generalizationLoss = 0.0, pq = 0.0;
            double[] trainingErrors = new double[this.strip];
            for (int i = 0; i < this.strip; i++) trainingErrors[i] = double.MaxValue / strip;

            double lastMSE = double.MaxValue;

            // advanced early stopping
            int n = this.data.Length - this.network.InputLayer.NeuronCount;
            int validationSet = (int)Math.Round(this.validationSetRatio * n);
            int trainingSet = n - validationSet;
            double[][] networkTrainingInput = new double[trainingSet][];
            double[][] networkTrainingOutput = new double[trainingSet][];
            for (int i = 0; i < trainingSet; i++)
            {
                networkTrainingInput[i] = new double[this.network.InputLayer.NeuronCount];
                networkTrainingOutput[i] = new double[1];
            }
            for (int i = 0; i < trainingSet; i++)
            {
                for (int j = 0; j < this.network.InputLayer.NeuronCount; j++)
                {
                    networkTrainingInput[i][j] = this.networkInput[i][j];
                }
                networkTrainingOutput[i][0] = this.networkOutput[i][0];
            }

            // validation set
            double[] solutionValidation = new double[validationSet];
            double[] inputForValidation = new double[this.network.InputLayer.NeuronCount];
            double[] inputForValidationNetwork = new double[this.network.InputLayer.NeuronCount];

            // array for saving neural weights and parameters
            this.bestValidationError = double.MaxValue;
            this.bestWeightMatrix = new double[this.network.Layers.Count -1][,];
            this.bestSolution = new double[n];

            for (int i = 0; i < this.network.Layers.Count - 1; i++)
            {
                this.bestWeightMatrix[i] = new double[this.network.Layers[i].WeightMatrix.Rows, this.network.Layers[i].WeightMatrix.Cols];
            }

            //best network criterion
            double bestNetworkError = double.MaxValue, bestNetworkMSE = double.MaxValue, bestNetworkMAE = double.MaxValue;

            // build array for graph
            this.solutionData = new double[n];
            this.predictedPoint = new cPoint[n];
            this.validationPoint = new cPoint[validationSet];

            //initialize point for graph
            predictedDS.Samples = predictedPoint;
            validationDS.Samples = validationPoint;
            this.predictedDS.Active = true;

            // prepare training data
            INeuralDataSet dataset;
            if (this.useAdvanceEarlyStopping)
                dataset = new BasicNeuralDataSet(networkTrainingInput, networkTrainingOutput);
            else
                dataset = new BasicNeuralDataSet(this.networkInput, this.networkOutput);

            // initialize trainer
            this.learning = new Backpropagation(this.network, dataset, this.learningRate, this.momentum);

            //training
            while (!needToStop)
            {
                double sse = 0.0;
                double sae = 0.0;
                double ssle = 0.0;
                double sspe = 0.0;

                this.learning.Iteration();
                error = learning.Error;

                if (this.useAdvanceEarlyStopping)
                {
                    this.validationDS.Active = true;
                }
                else
                {
                    this.validationDS.Active = false;
                }

                for (int i = 0; i < n; i++)
                {
                    INeuralData neuraldata = new BasicNeuralData(this.networkInput[i]);

                    this.solutionData[i] = (this.network.Compute(neuraldata)[0]
                        - this.minNormalizedData) / this.factor + this.minData;

                    this.predictedPoint[i].x = i + this.network.InputLayer.NeuronCount;
                    this.predictedPoint[i].y = (float)this.solutionData[i];

                    sse += Math.Pow(this.solutionData[i] - this.data[i + this.network.InputLayer.NeuronCount], 2);
                    sae += Math.Abs(this.solutionData[i] - this.data[i + this.network.InputLayer.NeuronCount]);

                    //calculate advance early stopping
                    if (this.useAdvanceEarlyStopping)
                    {
                        if (i < n - validationSet)
                        {
                            ssle += Math.Pow(this.solutionData[i] - this.data[i + this.network.InputLayer.NeuronCount], 2);
                        }
                        else
                        {

                            // initialize the first validation set input
                            if (i == n - validationSet)
                            {
                                for (int j = 0; j < this.network.InputLayer.NeuronCount; j++)
                                {
                                    inputForValidation[this.network.InputLayer.NeuronCount - 1 - j] = this.data[this.data.Length - (n - i) - 1 - j];
                                }
                            }

                            for (int j = 0; j < this.network.InputLayer.NeuronCount; j++)
                            {
                                inputForValidationNetwork[j] = (inputForValidation[j] - this.minData) * this.factor + this.minNormalizedData;
                            }

                            INeuralData neuraldataval = new BasicNeuralData(inputForValidationNetwork);
                            solutionValidation[i - n + validationSet] = (this.network.Compute(neuraldataval)[0] - this.minNormalizedData) / this.factor + this.minData;

                            this.validationPoint[i - n + validationSet].x = i + this.network.InputLayer.NeuronCount;
                            this.validationPoint[i - n + validationSet].y = (float)solutionValidation[i - n + validationSet];

                            sspe += Math.Pow(this.data[i + this.network.InputLayer.NeuronCount] - solutionValidation[i - n + validationSet], 2);

                            // initialize the next validation set input from the current validation set input
                            for (int j = 0; j < this.network.InputLayer.NeuronCount - 1; j++)
                            {
                                inputForValidation[j] = inputForValidation[j + 1];
                            }

                            inputForValidation[this.network.InputLayer.NeuronCount - 1] = solutionValidation[i - n + validationSet];

                        }
                    }

                }

                mse = sse / this.solutionData.Length;
                mae = sae / this.solutionData.Length;

                //Console.WriteLine(error.ToString());

                //Display it
                this.iterationBox.Text = iteration.ToString();
                this.maeBox.Text = mae.ToString("F5");
                this.mseBox.Text = mse.ToString("F5");
                this.errorBox.Text = error.ToString("F5");

                seriesGraph.Refresh();

                if (this.useAdvanceEarlyStopping)
                {
                    //calculate advance early stopping 2
                    mspe = sspe / validationSet;
                    msle = ssle / (this.solutionData.Length - validationSet);

                    //save best weight
                    if (this.bestValidationError > mspe)
                    {
                        this.bestValidationError = mspe;
                        this.bestSolution = this.solutionData;

                        // weight matrix
                        for (int i = 0; i < this.network.Layers.Count - 1; i++)
                            for (int j = 0; j < this.network.Layers[i].WeightMatrix.Rows; j++)
                                for (int k = 0; k < this.network.Layers[i].WeightMatrix.Cols; k++)
                                    this.bestWeightMatrix[i][j,k] = this.network.Layers[i].WeightMatrix[j, k];

                        bestNetworkError = error;
                        bestNetworkMAE = mae;
                        bestNetworkMSE = mse;

                    }
                    //calculate generalization loss &pq
                    generalizationLoss = 100 * (mspe / this.bestValidationError - 1);

                    trainingErrors[(iteration - 1) % this.strip] = msle;
                    double minStripTrainingError = double.MaxValue, sumStripTrainingError = 0.0;
                    for (int i = 0; i < this.strip; i++)
                    {
                        sumStripTrainingError += trainingErrors[i];
                        if (trainingErrors[i] < minStripTrainingError) minStripTrainingError = trainingErrors[i];
                    }
                    double trainingProgress = 1000 * ((sumStripTrainingError / (this.strip * minStripTrainingError)) - 1);
                    pq = generalizationLoss / trainingProgress;

                    //display advance early stopping
                    this.learningErrorBox.Text = msle.ToString("F5");
                    this.validationErrorBox.Text = mspe.ToString("F5");
                    this.generalizationLossBox.Text = generalizationLoss.ToString("F5");
                    this.pqBox.Text = pq.ToString("F5");
                    this.seriesGraph.Refresh();

                    //stopping
                    switch (this.advanceStoppingMethod)
                    {
                        case AdvanceStoppingMethodEnumeration.GeneralizationLoss:
                            if (generalizationLoss > this.generalizationLossTreshold) needToStop = true;
                            break;
                        case AdvanceStoppingMethodEnumeration.ProgressQuotient:
                            if (pq > this.pqTreshold) needToStop = true;
                            break;
                    }

                }

                if (this.withCheckingCycle && iteration % this.checkingCycle == 0)
                {
                    switch (this.checkingMethod)
                    {
                        case CheckingMethodEnumeration.byMSEValue:
                            if (mse <= this.byMSEValueStopping) needToStop = true;
                            break;
                        case CheckingMethodEnumeration.byMSEChange:
                            if (lastMSE - mse <= this.byMSEChangeStopping) needToStop = true;
                            break;
                    }
                    lastMSE = mse;
                }
                if (iteration >= this.maxIteration)
                {
                    needToStop = true;
                }

                iteration++;
            }

            //restore weight
            if (this.useAdvanceEarlyStopping)
            {
                this.solutionData = this.bestSolution;

                // weight matrix

                for (int i = 0; i < this.network.Layers.Count - 1; i++)
                    for (int j = 0; j < this.network.Layers[i].WeightMatrix.Rows; j++)
                        for (int k = 0; k < this.network.Layers[i].WeightMatrix.Cols; k++)
                            this.network.Layers[i].WeightMatrix[j, k] = this.bestWeightMatrix[i][j, k];

                //best network criterion
                this.error = bestNetworkError;
                this.mse = bestNetworkMSE;
                this.mae = bestNetworkMAE;
            }
            else
            {
                this.error = error;
                this.mse = mse;
                this.mae = mae;
            }

            this.enableControls(true);
        }
        /// <summary>
        /// Construct the chain rule calculation.
        /// </summary>
        /// <param name="network">The network to use.</param>
        /// <param name="indexableTraining">The training set to use.</param>
        public JacobianChainRule(BasicNetwork network,
                 IIndexable indexableTraining)
        {
            this.indexableTraining = indexableTraining;
            this.network = network;
            this.parameterSize = network.Structure.CalculateSize();
            this.inputLength = (int)this.indexableTraining.Count;
            this.jacobian = EngineArray.AllocateDouble2D(this.inputLength, this.parameterSize);
            this.rowErrors = new double[this.inputLength];

            BasicNeuralData input = new BasicNeuralData(
                   this.indexableTraining.InputSize);
            BasicNeuralData ideal = new BasicNeuralData(
                   this.indexableTraining.IdealSize);
            this.pair = new BasicNeuralDataPair(input, ideal);
        }
        /// <summary>
        /// Generate neural ideal data for the specified index.
        /// </summary>
        /// <param name="index">The index to generate for.</param>
        /// <returns>The neural data generated.</returns>
        public virtual BasicNeuralData GenerateOutputNeuralData(int index)
        {
            if (index + _predictWindowSize > _points.Count)
            {
                throw new TemporalError("Can't generate prediction temporal data "
                                        + "beyond the end of provided data.");
            }

            var result = new BasicNeuralData(_outputNeuronCount);
            int resultIndex = 0;

            for (int i = 0; i < _predictWindowSize; i++)
            {
                foreach (TemporalDataDescription desc in _descriptions)
                {
                    if (desc.IsPredict)
                    {
                        result[resultIndex++] = FormatData(desc, index
                                                                 + i);
                    }
                }
            }
            return result;
        }
        /// <summary>
        /// Write an array.
        /// </summary>
        /// <param name="data">The data to write.</param>
        /// <param name="inputCount">How much of the data is input.</param>
        public void Write(double[] data, int inputCount)
        {

            if (this.idealCount == 0)
            {
                BasicNeuralData inputData = new BasicNeuralData(data);
                this.dataset.Add(inputData);
            }
            else
            {
                BasicNeuralData inputData = new BasicNeuralData(
                       this.inputCount);
                BasicNeuralData idealData = new BasicNeuralData(
                       this.idealCount);

                int index = 0;
                for (int i = 0; i < this.inputCount; i++)
                {
                    inputData[i] = data[index++];
                }

                for (int i = 0; i < this.idealCount; i++)
                {
                    idealData[i] = data[index++];
                }

                this.dataset.Add(inputData, idealData);
            }
        }
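A hedged sketch of feeding this Write method: each row is one flat array whose first values are inputs and whose remaining values are ideals, matching how the method splits data; writer is a hypothetical, already-configured instance of the declaring class (here with two inputs and one ideal).
        // Hedged sketch: 'writer' is hypothetical, configured with inputCount = 2, idealCount = 1.
        double[][] rows =
        {
            new[] { 0.0, 0.0, 0.0 },   // two inputs followed by one ideal value
            new[] { 0.0, 1.0, 1.0 },
            new[] { 1.0, 0.0, 1.0 },
            new[] { 1.0, 1.0, 0.0 }
        };

        foreach (double[] row in rows)
        {
            writer.Write(row, 2);
        }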
        /// <summary>
        /// Generate random training into a training set.
        /// </summary>
        /// <param name="training">The training set to generate into.</param>
        /// <param name="seed">The seed to use.</param>
        /// <param name="count">How much data to generate.</param>
        /// <param name="min">The low random value.</param>
        /// <param name="max">The high random value.</param>
        public static void Generate(INeuralDataSet training,
                long seed,
                int count,
                double min, double max)
        {

            LinearCongruentialGenerator rand
                = new LinearCongruentialGenerator(seed);

            int inputCount = training.InputSize;
            int idealCount = training.IdealSize;

            for (int i = 0; i < count; i++)
            {
                INeuralData inputData = new BasicNeuralData(inputCount);

                for (int j = 0; j < inputCount; j++)
                {
                    inputData[j] = rand.Range(min, max);
                }

                INeuralData idealData = new BasicNeuralData(idealCount);

                for (int j = 0; j < idealCount; j++)
                {
                    idealData[j] = rand.Range(min, max);
                }

                BasicNeuralDataPair pair = new BasicNeuralDataPair(inputData,
                        idealData);
                training.Add(pair);

            }
        }
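A hedged sketch of the in-place overload above. Because it reads InputSize and IdealSize from the target set, the set must already report those sizes; here it is seeded with one pair first, and the call is written as if made from inside the declaring class.
        // Hedged sketch: seed the set so InputSize/IdealSize are defined, then
        // append 50 reproducible random pairs in the range [-1, 1].
        double[][] seedInput = { new[] { 0.0, 0.0, 0.0 } };
        double[][] seedIdeal = { new[] { 0.0 } };
        INeuralDataSet target = new BasicNeuralDataSet(seedInput, seedIdeal);

        Generate(target, 1000L, 50, -1.0, 1.0);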
Example #13
 public IMLData BuildForNetworkInput(double[] data)
 {
     // Feed the raw values into the input fields.
     int index = 0;
     foreach (IInputField field in this._inputFields)
     {
         if (field.UsedForNetworkInput)
         {
             if (index >= data.Length)
             {
                 throw new NormalizationError("Can't build data, input fields used for neural input, must match provided data(" + data.Length + ").");
             }
             field.CurrentValue = data[index++];
         }
     }

     // Count the non-ideal output subfields.
     int outputCount = 0;
     foreach (IOutputField outputField in this._outputFields)
     {
         if (!outputField.Ideal)
         {
             outputCount += outputField.SubfieldCount;
         }
     }

     // Calculate the normalized output values.
     this.InitForOutput();
     IMLData result = new BasicNeuralData(outputCount);

     int outputIndex = 0;
     foreach (IOutputField outputField in this._outputFields)
     {
         if (!outputField.Ideal)
         {
             for (int sub = 0; sub < outputField.SubfieldCount; sub++)
             {
                 result.Data[outputIndex++] = outputField.Calculate(sub);
             }
         }
     }

     return result;
 }
        /// <summary>
        /// Construct the SVD training object.
        /// </summary>
        /// <param name="network">The network to train. Must have a single output neuron.</param>
        /// <param name="training">The training data to use. Must be indexable.</param>
        public SVDTraining(BasicNetwork network, INeuralDataSet training)
        {
            ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

            if (outputLayer == null)
            {
                throw new TrainingError("SVD requires an output layer.");
            }

            if (outputLayer.NeuronCount != 1)
            {
                throw new TrainingError("SVD requires an output layer with a single neuron.");
            }

            if (network.GetLayer(RadialBasisPattern.RBF_LAYER) == null)
                throw new TrainingError("SVD is only tested to work on radial basis function networks.");

            rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);

            this.Training = training;
            this.network = network;
            this.trainingLength = (int)this.Training.InputSize;

            BasicNeuralData input = new BasicNeuralData(this.Training.InputSize);
            BasicNeuralData ideal = new BasicNeuralData(this.Training.IdealSize);
            this.pair = new BasicNeuralDataPair(input, ideal);
        }
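A hedged sketch of using this constructor: network is assumed to be built from Encog's RadialBasisPattern (so the RBF_LAYER tag exists) with exactly one output neuron, trainingSet is any supervised INeuralDataSet, and the Iteration()/Error trainer pattern is assumed to match the one used with Backpropagation earlier on this page.
        // Hedged sketch: 'network' and 'trainingSet' are assumed to satisfy the
        // checks enforced by the constructor above.
        SVDTraining train = new SVDTraining(network, trainingSet);

        // SVD solves the RBF output weights directly, so a single iteration
        // is typically sufficient.
        train.Iteration();
        Console.WriteLine("Training error: " + train.Error);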
Example #15
 public virtual BasicNeuralData GenerateOutputNeuralData(int index)
 {
     if ((index + this._xcfe8f2ba20a6a8e4) > this._x6fa2570084b2ad39.Count)
     {
         throw new TemporalError("Can't generate prediction temporal data beyond the end of provided data.");
     }

     BasicNeuralData data = new BasicNeuralData(this._x0c37ff3adde9bc44);
     int resultIndex = 0;

     // Gather one value per predicted description for each bar in the prediction window.
     for (int windowOffset = 0; windowOffset < this._xcfe8f2ba20a6a8e4; windowOffset++)
     {
         foreach (TemporalDataDescription description in this._x6849a3dfb0329317)
         {
             if (description.IsPredict)
             {
                 data[resultIndex++] = this.x8c7fc30b887213d1(description, index + windowOffset);
             }
         }
     }

     return data;
 }
        /// <summary>
        /// Generate input neural data for the specified index.
        /// </summary>
        /// <param name="index">The index to generate neural data for.</param>
        /// <returns>The input neural data generated.</returns>
        public virtual BasicNeuralData GenerateInputNeuralData(int index)
        {
            if (index + this.inputWindowSize > this.points.Count)
            {
                throw new TemporalError("Can't generate input temporal data "
                        + "beyond the end of provided data.");
            }

            BasicNeuralData result = new BasicNeuralData(this.inputNeuronCount);
            int resultIndex = 0;

            for (int i = 0; i < this.inputWindowSize; i++)
            {
                int descriptionIndex = 0;

                foreach (TemporalDataDescription desc in this.descriptions)
                {
                    if (desc.IsInput)
                    {
                        result[resultIndex++] = this.FormatData(desc, index
                                + i);
                    }
                    descriptionIndex++;
                }
            }
            return result;
        }
        /// <summary>
        /// Compute the values before sending output to the next layer.
        /// This function allows the activation functions to be called.
        /// </summary>
        /// <param name="pattern">The incoming Project.</param>
        /// <returns>The output from this layer.</returns>
        public override INeuralData Compute(INeuralData pattern)
        {
            INeuralData result = new BasicNeuralData(NeuronCount);

            for (int i = 0; i < NeuronCount; i++)
            {

                if (this.radialBasisFunction[i] == null)
                {
                    String str =
               "Error, must define radial functions for each neuron";
#if logging
                    if (RadialBasisFunctionLayer.logger.IsErrorEnabled)
                    {
                        RadialBasisFunctionLayer.logger.Error(str);
                    }
#endif
                    throw new NeuralNetworkError(str);
                }

                IRadialBasisFunction f = this.radialBasisFunction[i];

                if (pattern.Data.Length != f.Dimensions)
                    throw new Exception("Inputs must equal the number of dimensions.");

                result[i] = f.Calculate(pattern.Data);
            }

            return result;
        }
        /// <summary>
        /// Clone this object.
        /// </summary>
        /// <returns>A clone of this object.</returns>
        public object Clone()
        {
            BasicNeuralData result = new BasicNeuralData(this.data);

            return(result);
        }
        /// <summary>
        /// Compute the weighted output from this synapse. Each neuron
        /// in the from layer has a weighted connection to each of the
        /// neurons in the next layer. 
        /// </summary>
        /// <param name="input">The input from the synapse.</param>
        /// <returns>The output from this synapse.</returns>
        public override INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(this.ToNeuronCount);

            double[] inputArray = input.Data;
            double[][] matrixArray = this.WeightMatrix.Data;
            double[] resultArray = result.Data;

            // Each output neuron is the weighted sum of all input neurons.
            for (int i = 0; i < this.ToNeuronCount; i++)
            {
                double sum = 0;
                for (int j = 0; j < inputArray.Length; j++)
                {
                    sum += inputArray[j] * matrixArray[j][i];
                }
                resultArray[i] = sum;
            }
            return result;
        }
 /// <summary>
 /// Clone this object.
 /// </summary>
 /// <returns>A clone of this object.</returns>
 public object Clone()
 {
     BasicNeuralData result = new BasicNeuralData(this.data);
     return result;
 }
Example #21
        /// <summary>
        /// Build "input data for a neural network" based on the input values
        /// provided.  This allows  input for a neural network to be normalized.
        /// This is typically used when data is to be presented to a trained
        /// neural network.
        /// </summary>
        /// <param name="data">The input values to be normalized.</param>
        /// <returns>The data to be sent to the neural network.</returns>
        public IMLData BuildForNetworkInput(double[] data)
        {
            // feed the input fields
            int index = 0;
            foreach (IInputField field in _inputFields)
            {
                if (field.UsedForNetworkInput)
                {
                    if (index >= data.Length)
                    {
                        throw new NormalizationError(
                            "Can't build data, input fields used for neural input, must match provided data("
                            + data.Length + ").");
                    }
                    field.CurrentValue = data[index++];
                }
            }

            // count the output fields
            int outputCount = 0;
            foreach (IOutputField ofield in _outputFields)
            {
                if (!ofield.Ideal)
                {
                    for (int sub = 0; sub < ofield.SubfieldCount; sub++)
                    {
                        outputCount++;
                    }
                }
            }

            // process the output fields

            InitForOutput();

            var result = new BasicNeuralData(outputCount);

            // write the value
            int outputIndex = 0;
            foreach (IOutputField ofield in _outputFields)
            {
                if (!ofield.Ideal)
                {
                    for (int sub = 0; sub < ofield.SubfieldCount; sub++)
                    {
                        result[outputIndex++] = ofield.Calculate(sub);
                    }
                }
            }

            return result;
        }
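A hedged sketch of calling this method: norm stands for an already-configured instance of the declaring normalization class (its input and output fields set up elsewhere), and raw holds one unnormalized observation in the same order as those input fields.
        // Hedged sketch: 'norm' is a hypothetical, fully configured instance of the
        // declaring class; the raw values follow its input-field order.
        double[] raw = { 1250.0, 3.75 };
        IMLData normalized = norm.BuildForNetworkInput(raw);

        // 'normalized' can now be presented to a trained network, e.g.:
        // IMLData output = network.Compute(normalized);   // 'network' is hypothetical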
        /// <summary>
        /// Create one training pair, either good or bad.
        /// </summary>
        /// <param name="data">The data to create from.</param>
        /// <param name="index">The index into the data to create from.</param>
        /// <param name="good">True if this was a good(bearish) period.</param>
        /// <returns></returns>
        public IMLDataPair CreateData(List<LoadedMarketData> data, int index, bool good)
        {
            BasicNeuralData ideal = new BasicNeuralData(1);

            INeuralData input = CreateData(data, index);

            if (input == null)
                return null;

            // ideal
            if (good)
                ideal[0] = 0.9;
            else
                ideal[0] = 0.1;

            return new BasicMLDataPair(input, ideal);
        }
        /// <summary>
        /// Forecast the data.
        /// </summary>
        /// <param name="step">The number of forecast steps.</param>
        /// <returns>The forecast results.</returns>
        public double[] Forecast(int step)
        {
            if (step < 1) step = 1;

            int inputsCount = this.network.InputLayer.NeuronCount;
            double[] result = new double[step];
            double[] inputForOut = new double[inputsCount];
            double[] inputForOutNetwork = new double[inputsCount];

            //input for forecasting
            for (int j = 0; j < inputsCount; j++)
            {
                inputForOut[inputsCount - 1 - j] = this.data[this.data.Length - 1 - j];
            }

            //forecast
            for (int i = 0; i < step; i++)
            {
                for (int j = 0; j < inputsCount; j++)
                {
                    inputForOutNetwork[j] = (inputForOut[j] - this.minData) * this.factor + this.minNormalizedData;
                }

                INeuralData neuraldata = new BasicNeuralData(inputForOutNetwork);
                // evaluate the function
                result[i] = (this.network.Compute(neuraldata)[0] - this.minNormalizedData) / this.factor + this.minData;

                for (int j = 0; j < inputsCount - 1; j++)
                {
                    inputForOut[j] = inputForOut[j + 1];
                }

                inputForOut[inputsCount - 1] = result[i];
            }

            return result;
        }
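Finally, a hedged sketch of asking for a few steps ahead once searchSolution has trained the network; forecaster is a hypothetical instance of the containing class.
        // Hedged sketch: 'forecaster' is a hypothetical, already-trained instance
        // of the class that declares Forecast.
        double[] nextFive = forecaster.Forecast(5);

        for (int i = 0; i < nextFive.Length; i++)
        {
            Console.WriteLine("t+{0}: {1:F4}", i + 1, nextFive[i]);
        }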
        /// <summary>
        /// Compute the output from this synapse. 
        /// </summary>
        /// <param name="input">The input to this synapse.</param>
        /// <returns>The output from this synapse.</returns>
        public INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(ToNeuronCount);

            if (this.neurons.Count == 0)
            {
                throw new NeuralNetworkError("This network has not been evolved yet, it has no neurons in the NEAT synapse.");
            }

            int flushCount = 1;

            if (snapshot)
            {
                flushCount = networkDepth;
            }

            // iterate through the network FlushCount times
            for (int i = 0; i < flushCount; ++i)
            {
                int outputIndex = 0;
                int index = 0;

                result.Clear();

                // populate the input neurons
                while (neurons[index].NeuronType == NEATNeuronType.Input)
                {
                    neurons[index].Output = input[index];

                    index++;
                }

                // set the bias neuron
                neurons[index++].Output = 1;

                while (index < neurons.Count)
                {

                    NEATNeuron currentNeuron = neurons[index];

                    double sum = 0;

                    foreach (NEATLink link in currentNeuron.InboundLinks)
                    {
                        double weight = link.Weight;
                        double neuronOutput = link.FromNeuron.Output;
                        sum += weight * neuronOutput;
                    }

                    double[] d = new double[1];
                    d[0] = sum / currentNeuron.ActivationResponse;
                    activationFunction.ActivationFunction(d,0,1);

                    neurons[index].Output = d[0];

                    if (currentNeuron.NeuronType == NEATNeuronType.Output)
                    {
                        result.Data[outputIndex++] = currentNeuron.Output;
                    }
                    index++;
                }
            }

            return result;
        }
        /// <summary>
        /// Compute the output for a given input to the neural network. This method
        /// provides a parameter to specify an output holder to use.  This holder
        /// allows propagation training to track the output from each layer.
        /// If you do not need this holder, pass null or use the other
        /// compute method.
        /// </summary>
        /// <param name="input">The input provide to the neural network.</param>
        /// <param name="useHolder">Allows a holder to be specified, this allows
        /// propagation training to check the output of each layer.</param>
        /// <returns>The results from the output neurons.</returns>
        public virtual INeuralData Compute(INeuralData input,
                 NeuralOutputHolder useHolder)
        {
            NeuralOutputHolder holder;

            ILayer inputLayer = this.network.GetLayer(BasicNetwork.TAG_INPUT);

#if logging
            if (FeedforwardLogic.logger.IsDebugEnabled)
            {
                FeedforwardLogic.logger.Debug("Pattern " + input.ToString()
                    + " presented to neural network");
            }
#endif

            if (useHolder == null && this.network.Structure.Flat != null)
            {
                this.network.Structure.UpdateFlatNetwork();
                INeuralData result = new BasicNeuralData(this.network.Structure.Flat.OutputCount);
                this.network.Structure.Flat.Compute(input.Data, result.Data);
                return result;
            }

            if (useHolder == null)
            {
                holder = new NeuralOutputHolder();
            }
            else
            {
                holder = useHolder;
            }

            Compute(holder, inputLayer, input, null);
            return holder.Output;
        }
        /// <summary>
        /// Compute the weightless output from this synapse. Each neuron
        /// in the from layer has a weightless connection to each of the
        /// neurons in the next layer. 
        /// </summary>
        /// <param name="input">The input from the synapse.</param>
        /// <returns>The output from this synapse.</returns>
        public override INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(this.ToNeuronCount);
            // just sum the input
            double sum = 0;
            for (int i = 0; i < input.Count; i++)
            {
                sum += input[i];
            }

            for (int i = 0; i < this.ToNeuronCount; i++)
            {
                result[i] = sum;
            }
            return result;
        }