public override double[][][] EstimateXi(IMLDataSet sequence,
    ForwardBackwardCalculator fbc, HiddenMarkovModel hmm)
{
    if (sequence.Count <= 1)
    {
        throw new EncogError("Must have more than one observation");
    }

    double[][][] xi = EngineArray.AllocDouble3D((int)sequence.Count - 1,
        hmm.StateCount, hmm.StateCount);
    double probability = fbc.Probability();

    for (int t = 0; t < (sequence.Count - 1); t++)
    {
        IMLDataPair o = sequence[t + 1];

        for (int i = 0; i < hmm.StateCount; i++)
        {
            for (int j = 0; j < hmm.StateCount; j++)
            {
                xi[t][i][j] = (fbc.AlphaElement(t, i)
                    * hmm.TransitionProbability[i][j]
                    * hmm.StateDistributions[j].Probability(o)
                    * fbc.BetaElement(t + 1, j)) / probability;
            }
        }
    }

    return xi;
}
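This computes the Baum-Welch xi term. A rough wiring sketch follows; the ForwardBackwardCalculator constructor shown is an assumption based on the calculators later in this section, not confirmed by this snippet:

// Hypothetical usage; assumes ForwardBackwardCalculator(sequence, hmm) exists.
var fbc = new ForwardBackwardCalculator(sequence, hmm);
double[][][] xi = EstimateXi(sequence, fbc, hmm);
// xi[t][i][j]: probability of being in state i at time t and state j at
// time t + 1, given the model and the observed sequence.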
/// <summary>
/// Called internally to advance to the next row.
/// </summary>
/// <returns>True if there are more rows to read.</returns>
private bool Next()
{
    // see if any of the CSV readers want to stop
    if (_readCSV.Any(csv => !csv.Next()))
    {
        return false;
    }

    // advance every data set iterator in lock step; if any one of them is
    // exhausted, the whole row stream stops
    foreach (var iterator in _readDataSet)
    {
        if (!iterator.MoveNext())
        {
            return false;
        }

        MLDataFieldHolder holder = _dataSetIteratorMap[iterator];
        IMLDataPair pair = iterator.Current;
        holder.Pair = pair;
    }

    // see if any of the fixed-length arrays want to stop
    if (_inputFields.OfType<IHasFixedLength>().Any(fl => (_currentIndex + 1) >= fl.Length))
    {
        return false;
    }

    _currentIndex++;
    return true;
}
/// <summary>
/// Evaluate memory.
/// </summary>
private void EvalMemory()
{
    BasicMLDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    const long stop = (10 * Evaluate.Milis);
    int record = 0;
    IMLDataPair pair = BasicMLDataPair.CreatePair(10, 10);

    int iterations = 0;
    var watch = new Stopwatch();
    watch.Start();
    while (watch.ElapsedMilliseconds < stop)
    {
        iterations++;
        training.GetRecord(record++, pair);
        if (record >= training.Count)
        {
            record = 0;
        }
    }

    iterations /= 100000;

    _report.Report(Steps, Step2,
        "Memory dataset, result: " + Format.FormatInteger(iterations));

    _memoryScore = iterations;
}
/// <summary>
/// Called internally to obtain the current value for an input field and
/// store it in the field's CurrentValue property.
/// </summary>
/// <param name="field">The input field to determine.</param>
/// <param name="index">The current index.</param>
private void DetermineInputFieldValue(IInputField field, int index)
{
    double result;

    if (field is InputFieldCSV)
    {
        var fieldCSV = (InputFieldCSV)field;
        ReadCSV csv = _csvMap[field];
        result = csv.GetDouble(fieldCSV.Offset);
    }
    else if (field is InputFieldMLDataSet)
    {
        var mlField = (InputFieldMLDataSet)field;
        MLDataFieldHolder holder = _dataSetFieldMap[field];
        IMLDataPair pair = holder.Pair;
        int offset = mlField.Offset;

        // the offset indexes into the input vector first, then the ideal vector
        if (offset < pair.Input.Count)
        {
            result = pair.Input[offset];
        }
        else
        {
            offset -= pair.Input.Count;
            result = pair.Ideal[offset];
        }
    }
    else
    {
        result = field.GetValue(index);
    }

    field.CurrentValue = result;
}
public void ActivationTemporal()
{
    var temporal = new TemporalMLDataSet(5, 1);
    temporal.AddDescription(new TemporalDataDescription(
        new ActivationTANH(), TemporalDataDescription.Type.Raw, true, false));
    temporal.AddDescription(new TemporalDataDescription(
        new ActivationTANH(), TemporalDataDescription.Type.Raw, true, false));
    temporal.AddDescription(new TemporalDataDescription(
        new ActivationTANH(), TemporalDataDescription.Type.Raw, false, true));

    for (int i = 0; i < 10; i++)
    {
        TemporalPoint tp = temporal.CreatePoint(i);
        tp[0] = 1.0 + (i * 3);
        tp[1] = 2.0 + (i * 3);
        tp[2] = 3.0 + (i * 3);
    }

    temporal.Generate();

    IEnumerator<IMLDataPair> itr = temporal.GetEnumerator();

    // set 0
    itr.MoveNext();
    IMLDataPair pair = itr.Current;
    Assert.AreEqual(10, pair.Input.Count);
    Assert.AreEqual(1, pair.Ideal.Count);
    Assert.AreEqual(0.75, Math.Round(pair.Input[0] * 4.0) / 4.0);
    Assert.AreEqual(1.0, Math.Round(pair.Input[1] * 4.0) / 4.0);
    Assert.AreEqual(1.0, Math.Round(pair.Input[2] * 4.0) / 4.0);
    Assert.AreEqual(1.0, Math.Round(pair.Input[3] * 4.0) / 4.0);
}
public static BasicMLDataSet CreateEvaluationSetAndLoad(string fileName, int startLine,
    int howMany, int windowSize, int outputSize)
{
    List<double> opens = QuickCSVUtils.QuickParseCSV(fileName, "Open", startLine, howMany);
    List<double> highs = QuickCSVUtils.QuickParseCSV(fileName, "High", startLine, howMany);
    // List<double> lows = QuickCSVUtils.QuickParseCSV(fileName, "Low", startLine, howMany);
    List<double> closes = QuickCSVUtils.QuickParseCSV(fileName, "Close", startLine, howMany);
    List<double> volumes = QuickCSVUtils.QuickParseCSV(fileName, 5, startLine, howMany);

    double[] ranges = NetworkUtility.CalculateRanges(opens.ToArray(), closes.ToArray());

    // each series is converted to percent changes and windowed into one pair
    IMLDataPair pairOpen = TrainerHelper.ProcessPairs(
        NetworkUtility.CalculatePercents(opens.ToArray()),
        NetworkUtility.CalculatePercents(opens.ToArray()), windowSize, outputSize);
    IMLDataPair pairHigh = TrainerHelper.ProcessPairs(
        NetworkUtility.CalculatePercents(highs.ToArray()),
        NetworkUtility.CalculatePercents(highs.ToArray()), windowSize, outputSize);
    IMLDataPair pairClose = TrainerHelper.ProcessPairs(
        NetworkUtility.CalculatePercents(closes.ToArray()),
        NetworkUtility.CalculatePercents(closes.ToArray()), windowSize, outputSize);
    IMLDataPair pairVolume = TrainerHelper.ProcessPairs(
        NetworkUtility.CalculatePercents(volumes.ToArray()),
        NetworkUtility.CalculatePercents(volumes.ToArray()), windowSize, outputSize);
    IMLDataPair pairRange = TrainerHelper.ProcessPairs(
        NetworkUtility.CalculatePercents(ranges),
        NetworkUtility.CalculatePercents(ranges), windowSize, outputSize);

    var listData = new List<IMLDataPair>
    {
        pairOpen, pairHigh, pairClose, pairVolume, pairRange
    };

    return new BasicMLDataSet(listData);
}
/// <summary>
/// Process the data array and return an IMLDataPair.
/// </summary>
/// <param name="data">The array to process.</param>
/// <returns>An IMLDataPair containing the data.</returns>
public IMLDataPair ProcessToPair(double[] data)
{
    // note: each loop iteration overwrites 'pair', so only the final
    // window's pair is actually returned
    IMLDataPair pair = null;
    int totalWindowSize = _inputWindow + _predictWindow;
    int stopPoint = data.Length - totalWindowSize;

    for (int i = 0; i < stopPoint; i++)
    {
        IMLData inputData = new BasicMLData(_inputWindow);
        IMLData idealData = new BasicMLData(_predictWindow);

        int index = i;

        // handle input window
        for (int j = 0; j < _inputWindow; j++)
        {
            inputData[j] = data[index++];
        }

        // handle predict window
        for (int j = 0; j < _predictWindow; j++)
        {
            idealData[j] = data[index++];
        }

        pair = new BasicMLDataPair(inputData, idealData);
    }

    return pair;
}
/// <summary>
/// Compute alpha.
/// </summary>
/// <param name="hmm">The hidden markov model.</param>
/// <param name="oseq">The sequence.</param>
protected void ComputeAlpha(HiddenMarkovModel hmm, IMLDataSet oseq)
{
    Alpha = EngineArray.AllocateDouble2D((int)oseq.Count, hmm.StateCount);

    for (int i = 0; i < hmm.StateCount; i++)
    {
        ComputeAlphaInit(hmm, oseq[0], i);
    }

    IEnumerator<IMLDataPair> seqIterator = oseq.GetEnumerator();

    // the first MoveNext skips element 0, which was already consumed by
    // ComputeAlphaInit above
    if (seqIterator.MoveNext())
    {
        for (int t = 1; t < oseq.Count; t++)
        {
            seqIterator.MoveNext();
            IMLDataPair observation = seqIterator.Current;

            for (int i = 0; i < hmm.StateCount; i++)
            {
                ComputeAlphaStep(hmm, observation, t, i);
            }
        }
    }
}
/// <summary>
/// Construct a gradient worker.
/// </summary>
/// <param name="theNetwork">The network to train.</param>
/// <param name="theOwner">The owner that is doing the training.</param>
/// <param name="theTraining">The training data.</param>
/// <param name="theLow">The low index to use in the training data.</param>
/// <param name="theHigh">The high index to use in the training data.</param>
/// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
/// <param name="ef">The error function to use.</param>
public GradientWorker(FlatNetwork theNetwork, Propagation theOwner,
    IMLDataSet theTraining, int theLow, int theHigh, double[] theFlatSpots,
    IErrorFunction ef)
{
    _errorCalculation = new ErrorCalculation();
    _network = theNetwork;
    _training = theTraining;
    _low = theLow;
    _high = theHigh;
    _owner = theOwner;
    _flatSpot = theFlatSpots;

    _layerDelta = new double[_network.LayerOutput.Length];
    _gradients = new double[_network.Weights.Length];
    _actual = new double[_network.OutputCount];

    _weights = _network.Weights;
    _layerIndex = _network.LayerIndex;
    _layerCounts = _network.LayerCounts;
    _weightIndex = _network.WeightIndex;
    _layerOutput = _network.LayerOutput;
    _layerSums = _network.LayerSums;
    _layerFeedCounts = _network.LayerFeedCounts;
    _ef = ef;

    _pair = BasicMLDataPair.CreatePair(_network.InputCount, _network.OutputCount);
}
/// <summary>
/// Process one training set element.
/// </summary>
/// <param name="pair">The training pair, holding input, ideal values and significance.</param>
private void Process(IMLDataPair pair)
{
    _network.Compute(pair.Input, _actual);

    _errorCalculation.UpdateError(_actual, pair.Ideal, pair.Significance);

    // Calculate error for the output layer.
    _ef.CalculateError(
        _network.ActivationFunctions[0], _layerSums, _layerOutput,
        pair.Ideal, _actual, _layerDelta, _flatSpot[0],
        pair.Significance);

    // Apply regularization, if requested.
    if (_owner.L1 > EncogFramework.DefaultDoubleEqual
        || _owner.L2 > EncogFramework.DefaultDoubleEqual)
    {
        double[] lp = new double[2];
        CalculateRegularizationPenalty(lp);
        for (int i = 0; i < _actual.Length; i++)
        {
            double p = (lp[0] * _owner.L1) + (lp[1] * _owner.L2);
            _layerDelta[i] += p;
        }
    }

    // Propagate backwards (chain rule from calculus).
    for (int i = _network.BeginTraining; i < _network.EndTraining; i++)
    {
        ProcessLevel(i);
    }
}
/// <summary>
/// Construct the LMA object.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
/// <param name="h">The Hessian calculator to use.</param>
public LevenbergMarquardtTraining(BasicNetwork network,
    IMLDataSet training, IComputeHessian h)
    : base(TrainingImplementationType.Iterative)
{
    ValidateNetwork.ValidateMethodToData(network, training);

    Training = training;
    _indexableTraining = Training;
    _network = network;
    _trainingLength = (int)_indexableTraining.Count;
    _weightCount = _network.Structure.CalculateSize();
    _lambda = 0.1;
    _deltas = new double[_weightCount];
    _diagonal = new double[_weightCount];

    var input = new BasicMLData(_indexableTraining.InputSize);
    var ideal = new BasicMLData(_indexableTraining.IdealSize);
    _pair = new BasicMLDataPair(input, ideal);

    _hessian = h;
    _hessian.Init(network, training);
}
/// <summary>
/// Construct the LMA object.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
public LevenbergMarquardtTraining(BasicNetwork network, IMLDataSet training)
    : base(TrainingImplementationType.Iterative)
{
    ValidateNetwork.ValidateMethodToData(network, training);
    if (network.OutputCount != 1)
    {
        throw new TrainingError(
            "Levenberg Marquardt requires an output layer with a single neuron.");
    }

    Training = training;
    _indexableTraining = Training;
    _network = network;
    _trainingLength = (int)_indexableTraining.Count;
    _parametersLength = _network.Structure.CalculateSize();

    _hessianMatrix = new Matrix(_parametersLength, _parametersLength);
    _hessian = _hessianMatrix.Data;

    _alpha = 0.0d;
    _beta = 1.0d;
    _lambda = 0.1d;

    _deltas = new double[_parametersLength];
    _gradient = new double[_parametersLength];
    _diagonal = new double[_parametersLength];

    var input = new BasicMLData(_indexableTraining.InputSize);
    var ideal = new BasicMLData(_indexableTraining.IdealSize);
    _pair = new BasicMLDataPair(input, ideal);
}
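As a reference for what these fields feed into: the textbook Levenberg-Marquardt step (the standard formula, not quoted from this code) solves

\left(J^\top J + \lambda\,\mathrm{diag}(J^\top J)\right)\,\Delta w = J^\top e

where J is the Jacobian of the network output with respect to the weights (_hessianMatrix accumulates J^\top J), e is the error vector, and \lambda (_lambda, starting at 0.1) damps the step between a Gauss-Newton step and plain gradient descent.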
// Decompiled (obfuscated) JacobianChainRule constructor; the field names are
// decompiler output, annotated here with their apparent roles.
public JacobianChainRule(BasicNetwork network, IMLDataSet indexableTraining)
{
    this._xb12276308f0fa6d9 = indexableTraining;                   // training data
    this._x87a7fc6a72741c2e = network;                             // network
    this._xabb126b401219ba2 = network.Structure.CalculateSize();   // parameter count
    this._x530ae94d583e0ea1 = (int)this._xb12276308f0fa6d9.Count;  // training length
    this._xbdeab667c25bbc32 = EngineArray.AllocateDouble2D(
        this._x530ae94d583e0ea1, this._xabb126b401219ba2);         // jacobian matrix
    this._xc8a462f994253347 = new double[this._x530ae94d583e0ea1]; // per-row errors

    BasicMLData data = new BasicMLData(this._xb12276308f0fa6d9.InputSize);
    BasicMLData data2 = new BasicMLData(this._xb12276308f0fa6d9.IdealSize);
    this._x61830ac74d65acc3 = new BasicMLDataPair(data, data2);    // reusable pair
}
/// <summary>
/// Process the data array and return an IMLDataPair.
/// </summary>
/// <param name="data">The array to process.</param>
/// <returns>An IMLDataPair containing the data.</returns>
public IMLDataPair ProcessToPair(double[] data)
{
    // note: 'pair' is overwritten on every iteration, so only the final
    // window's pair is returned; see the corrected sketch below
    IMLDataPair pair = null;
    int totalWindowSize = _inputWindow + _predictWindow;
    int stopPoint = data.Length - totalWindowSize;

    for (int i = 0; i < stopPoint; i++)
    {
        var inputData = new BasicMLData(_inputWindow);
        var idealData = new BasicMLData(_predictWindow);

        int index = i;

        // handle input window
        for (int j = 0; j < _inputWindow; j++)
        {
            inputData[j] = data[index++];
        }

        // handle predict window
        for (int j = 0; j < _predictWindow; j++)
        {
            idealData[j] = data[index++];
        }

        pair = new BasicMLDataPair(inputData, idealData);
    }

    return pair;
}
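If the intent is to keep every window rather than just the last one, a minimal corrected sketch follows; the dataset-returning variant is an assumption, not part of the original class:

// Hypothetical variant: collects every sliding window into a dataset
// instead of discarding all but the last pair.
public BasicMLDataSet ProcessToDataSet(double[] data)
{
    var result = new BasicMLDataSet();
    int stopPoint = data.Length - (_inputWindow + _predictWindow);

    for (int i = 0; i < stopPoint; i++)
    {
        var inputData = new BasicMLData(_inputWindow);
        var idealData = new BasicMLData(_predictWindow);
        int index = i;

        for (int j = 0; j < _inputWindow; j++)
        {
            inputData[j] = data[index++];
        }
        for (int j = 0; j < _predictWindow; j++)
        {
            idealData[j] = data[index++];
        }

        result.Add(new BasicMLDataPair(inputData, idealData));
    }

    return result;
}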
/// <summary>
/// Process one training set element.
/// </summary>
/// <param name="outputNeuron">The output neuron.</param>
/// <param name="pair">The training pair.</param>
private void Process(int outputNeuron, IMLDataPair pair)
{
    _flat.Compute(pair.Input, _actual);

    double e = pair.Ideal[outputNeuron] - _actual[outputNeuron];
    _error += e * e;

    for (int i = 0; i < _actual.Length; i++)
    {
        if (i == outputNeuron)
        {
            _layerDelta[i] = _flat.ActivationFunctions[0]
                .DerivativeFunction(_layerSums[i], _layerOutput[i]);
        }
        else
        {
            _layerDelta[i] = 0;
        }
    }

    for (int i = _flat.BeginTraining; i < _flat.EndTraining; i++)
    {
        ProcessLevel(i);
    }

    // calculate gradients
    for (int j = 0; j < _weights.Length; j++)
    {
        _gradients[j] += e * _derivative[j];
        _totDeriv[j] += _derivative[j];
    }
}
public void Run(int index)
{
    IMLDataPair pair = _training[index];
    Process(pair);
    _owner.Report(_gradients, 0, null);
    EngineArray.Fill(_gradients, 0);
}
/// <summary>
/// Add input and expected output. This is used for supervised training.
/// </summary>
/// <param name="inputData">The input data to train on.</param>
public override void Add(IMLDataPair inputData)
{
    if (!(inputData.Input is ImageMLData))
    {
        throw new NeuralNetworkError(MUST_USE_IMAGE);
    }

    base.Add(inputData);
}
/// <summary>
/// Called to load training data for a company. This is how the training data is
/// actually created. To prepare input data for recognition use the CreateData
/// method. The training set will be added to. This allows the network to learn
/// from multiple companies if this method is called multiple times.
/// </summary>
/// <param name="symbol">The ticker symbol.</param>
/// <param name="training">The training set to add to.</param>
/// <param name="from">Beginning date.</param>
/// <param name="to">Ending date.</param>
public void LoadCompany(String symbol, BasicMLDataSet training, DateTime from, DateTime to)
{
    IMarketLoader loader = new YahooFinanceLoader();
    var ticker = new TickerSymbol(symbol);

    IList<MarketDataType> dataNeeded = new List<MarketDataType>();
    dataNeeded.Add(MarketDataType.AdjustedClose);
    dataNeeded.Add(MarketDataType.Close);
    dataNeeded.Add(MarketDataType.Open);
    dataNeeded.Add(MarketDataType.High);
    dataNeeded.Add(MarketDataType.Low);

    var results = (List<LoadedMarketData>)loader.Load(ticker, dataNeeded, from, to);
    results.Sort();

    for (var index = PredictWindow; index < results.Count - EvalWindow; index++)
    {
        var data = results[index];

        // determine bull or bear position, or neither
        var bullish = false;
        var bearish = false;

        for (int search = 1; search <= EvalWindow; search++)
        {
            var data2 = results[index + search];
            var priceBase = data.GetData(MarketDataType.AdjustedClose);
            var priceCompare = data2.GetData(MarketDataType.AdjustedClose);
            var diff = priceCompare - priceBase;
            var percent = diff / priceBase;

            if (percent > BullPercent)
            {
                bullish = true;
            }
            else if (percent < BearPercent)
            {
                bearish = true;
            }
        }

        IMLDataPair pair = null;
        if (bullish)
        {
            pair = CreateData(results, index, true);
        }
        else if (bearish)
        {
            pair = CreateData(results, index, false);
        }

        if (pair != null)
        {
            training.Add(pair);
        }
    }
}
/// <inheritdoc/>
public void Write(double[] input, double[] ideal, double significance)
{
    IMLDataPair pair = BasicMLDataPair.CreatePair(_inputSize, _idealSize);
    EngineArray.ArrayCopy(input, pair.Input.Data);
    EngineArray.ArrayCopy(ideal, pair.Ideal.Data);
    pair.Significance = significance;
    // note: as written, the assembled pair is never handed to any backing
    // store, so this method has no observable effect
}
/// <summary>
/// Read an individual record.
/// </summary>
/// <param name="index">The zero-based index. Specify 0 for the first record,
/// 1 for the second, and so on.</param>
/// <param name="pair">The data to read.</param>
public void GetRecord(long index, IMLDataPair pair)
{
    double[] inputTarget = pair.InputArray;
    double[] idealTarget = pair.IdealArray;

    egb.SetLocation((int)index);
    egb.Read(inputTarget);
    egb.Read(idealTarget);
    pair.Significance = egb.Read();
}
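A minimal read loop over such a record-oriented set; the names are illustrative, with 'set' standing in for any dataset that exposes GetRecord alongside Count, InputSize and IdealSize:

// Sketch: sequentially read every record into one reusable pair.
IMLDataPair pair = BasicMLDataPair.CreatePair(set.InputSize, set.IdealSize);
for (long i = 0; i < set.Count; i++)
{
    set.GetRecord(i, pair);
    // pair.Input / pair.Ideal now hold record i
}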
/// <summary>
/// Add a data pair of both input and ideal data.
/// </summary>
/// <param name="pair">The pair to add.</param>
public void Add(IMLDataPair pair)
{
    if (!loading)
    {
        throw new IMLDataError(ERROR_ADD);
    }

    egb.Write(pair.Input.Data);
    egb.Write(pair.Ideal.Data);
    egb.Write(pair.Significance);
}
/// <summary>
/// Add a data pair of both input and ideal data.
/// </summary>
/// <param name="pair">The pair to add.</param>
public void Add(IMLDataPair pair)
{
    if (!_loading)
    {
        throw new IMLDataError(ErrorAdd);
    }

    _egb.Write(pair.Input);
    _egb.Write(pair.Ideal);
    _egb.Write(pair.Significance);
}
/// <summary>
/// Evaluate disk.
/// </summary>
private void EvalBinary()
{
    FileInfo file = FileUtil.CombinePath(new FileInfo(Path.GetTempPath()), "temp.egb");

    BasicMLDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    // create the binary file
    if (file.Exists)
    {
        file.Delete();
    }

    var training2 = new BufferedMLDataSet(file.ToString());
    training2.Load(training);

    const long stop = (10 * Evaluate.Milis);
    int record = 0;
    IMLDataPair pair = BasicMLDataPair.CreatePair(10, 10);

    var watch = new Stopwatch();
    watch.Start();

    int iterations = 0;
    while (watch.ElapsedMilliseconds < stop)
    {
        iterations++;
        training2.GetRecord(record++, pair);
        if (record >= training2.Count)
        {
            record = 0;
        }
    }

    training2.Close();
    iterations /= 100000;

    _report.Report(Steps, Step3,
        "Disk(binary) dataset, result: " + Format.FormatInteger(iterations));

    if (file.Exists)
    {
        file.Delete();
    }

    _binaryScore = iterations;
}
/// <summary>
/// Compute the alpha step.
/// </summary>
/// <param name="hmm">The hidden markov model.</param>
/// <param name="o">The sequence element.</param>
/// <param name="t">The alpha step.</param>
/// <param name="j">The column.</param>
protected void ComputeAlphaStep(HiddenMarkovModel hmm, IMLDataPair o, int t, int j)
{
    double sum = 0.0;

    for (int i = 0; i < hmm.StateCount; i++)
    {
        sum += Alpha[t - 1][i] * hmm.TransitionProbability[i][j];
    }

    Alpha[t][j] = sum * hmm.StateDistributions[j].Probability(o);
}
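This is the forward (alpha) recurrence of the forward-backward algorithm. In standard HMM notation, with a_{ij} = hmm.TransitionProbability[i][j] and b_j(o_t) the emission probability computed by StateDistributions[j].Probability(o):

\alpha_t(j) = \Big[\sum_{i=1}^{N} \alpha_{t-1}(i)\, a_{ij}\Big]\, b_j(o_t)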
/// <summary>
/// The SSE error with the current weights.
/// </summary>
/// <returns>The sum-of-squares error.</returns>
private double CalculateError()
{
    var result = new ErrorCalculation();

    for (int i = 0; i < _trainingLength; i++)
    {
        _pair = _indexableTraining[i];
        IMLData actual = _network.Compute(_pair.Input);
        result.UpdateError(actual, _pair.Ideal, _pair.Significance);
    }

    return result.CalculateSSE();
}
/// <summary>
/// Compute the beta step.
/// </summary>
/// <param name="hmm">The hidden markov model.</param>
/// <param name="o">The data pair to compute.</param>
/// <param name="t">The matrix row.</param>
/// <param name="i">The matrix column.</param>
protected void ComputeBetaStep(HiddenMarkovModel hmm, IMLDataPair o, int t, int i)
{
    double sum = 0.0;

    for (int j = 0; j < hmm.StateCount; j++)
    {
        sum += Beta[t + 1][j] * hmm.TransitionProbability[i][j]
            * hmm.StateDistributions[j].Probability(o);
    }

    Beta[t][i] = sum;
}
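And this is the matching backward (beta) recurrence, in the same notation:

\beta_t(i) = \sum_{j=1}^{N} a_{ij}\, b_j(o_{t+1})\, \beta_{t+1}(j)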
/// <summary>
/// Make a random dataset with the given number of IMLDataPairs.
/// Quite useful for testing networks (benchmarks).
/// </summary>
/// <param name="inputs">The number of inputs.</param>
/// <param name="predictWindow">The predict window.</param>
/// <param name="numberOfPairs">The number of pairs.</param>
/// <returns>A random dataset.</returns>
public static BasicMLDataSet MakeRandomIMLDataset(int inputs, int predictWindow, int numberOfPairs)
{
    var superSet = new BasicMLDataSet();

    for (int i = 0; i < numberOfPairs; i++)
    {
        double[] firstInput = MakeInputs(inputs);
        double[] secondIdeal = MakeInputs(inputs);
        IMLDataPair pair = ProcessPairs(firstInput, secondIdeal, inputs, predictWindow);
        superSet.Add(pair);
    }

    return superSet;
}
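For example, a quick benchmark set (the values here are arbitrary):

// 100 random pairs, 5 inputs each, predicting a 1-wide window.
BasicMLDataSet benchmark = MakeRandomIMLDataset(5, 1, 100);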
// Decompiled (obfuscated) counterpart of the GradientWorker constructor; the
// field names are decompiler output, annotated with their apparent roles.
public GradientWorker(FlatNetwork theNetwork, TrainFlatNetworkProp theOwner,
    IMLDataSet theTraining, int theLow, int theHigh, double[] theFlatSpots,
    IErrorFunction ef)
{
    this._x84e81691256999b2 = new ErrorCalculation();  // error calculation
    this._x87a7fc6a72741c2e = theNetwork;              // network
    this._x823a2b9c8bf459c5 = theTraining;             // training data
    this._xd12d1dba8a023d95 = theLow;                  // low index
    this._x628ea9b89457a2a9 = theHigh;                 // high index
    this._x071bde1041617fce = theOwner;                // owner
    this._x0ba854627e1326f9 = theFlatSpots;            // flat spot constants

    this._x58c3d5da5c5c72db = new double[this._x87a7fc6a72741c2e.LayerOutput.Length]; // layer deltas
    this._xe05127febf8b7904 = new double[this._x87a7fc6a72741c2e.Weights.Length];     // gradients
    this._xd505507cf33ae543 = new double[this._x87a7fc6a72741c2e.OutputCount];        // actual output

    this._x2f33d779e5a20b28 = this._x87a7fc6a72741c2e.Weights;
    this._xb25095f37f20a1c1 = this._x87a7fc6a72741c2e.LayerIndex;
    this._xe05f7b8f952f0ba4 = this._x87a7fc6a72741c2e.LayerCounts;
    this._x7d5bf19d36074a85 = this._x87a7fc6a72741c2e.WeightIndex;
    this._x5e72e5e601f79c78 = this._x87a7fc6a72741c2e.LayerOutput;
    this._x59e01312f2f4aa96 = this._x87a7fc6a72741c2e.LayerSums;
    this._xc99b49dd213196ca = this._x87a7fc6a72741c2e.LayerFeedCounts;
    this._x2cb049236d33bbda = ef;                      // error function

    this._x61830ac74d65acc3 = BasicMLDataPair.CreatePair(
        this._x87a7fc6a72741c2e.InputCount, this._x87a7fc6a72741c2e.OutputCount);
}
public ViterbiCalculator(IMLDataSet oseq, HiddenMarkovModel hmm)
{
    if (oseq.Count < 1)
    {
        throw new EncogError("Must not have empty sequence");
    }

    this.delta = EngineArray.AllocateDouble2D((int)oseq.Count, hmm.StateCount);
    this.psy = EngineArray.AllocateInt2D((int)oseq.Count, hmm.StateCount);
    this._stateSequence = new int[oseq.Count];

    // initialization, in negative-log space
    for (int i = 0; i < hmm.StateCount; i++)
    {
        this.delta[0][i] = -Math.Log(hmm.GetPi(i))
            - Math.Log(hmm.StateDistributions[i].Probability(oseq[0]));
        this.psy[0][i] = 0;
    }

    // recursion over the remaining observations
    for (int t = 1; t < oseq.Count; t++)
    {
        IMLDataPair observation = oseq[t];
        for (int i = 0; i < hmm.StateCount; i++)
        {
            ComputeStep(hmm, observation, t, i);
        }
    }

    // termination: pick the most likely final state
    this.lnProbability = Double.PositiveInfinity;
    for (int i = 0; i < hmm.StateCount; i++)
    {
        double thisProbability = this.delta[oseq.Count - 1][i];
        if (this.lnProbability > thisProbability)
        {
            this.lnProbability = thisProbability;
            _stateSequence[oseq.Count - 1] = i;
        }
    }
    this.lnProbability = -this.lnProbability;

    // backtrack through psy to recover the full state sequence
    for (int t2 = (int)(oseq.Count - 2); t2 >= 0; t2--)
    {
        _stateSequence[t2] = this.psy[t2 + 1][_stateSequence[t2 + 1]];
    }
}
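ComputeStep (shown later in this section) implements the Viterbi recursion in negative log space:

\delta_t(j) = \min_i \big[\delta_{t-1}(i) - \ln a_{ij}\big] - \ln b_j(o_t),
\qquad \psi_t(j) = \arg\min_i \big[\delta_{t-1}(i) - \ln a_{ij}\big]

Minimizing negative log probabilities is equivalent to maximizing probabilities, while avoiding floating-point underflow on long sequences.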
/// <summary>
/// Get the minimum, over all the data, for the specified index.
/// </summary>
/// <param name="index">An index into the input data.</param>
/// <returns>The minimum value.</returns>
private double GetMinValue(int index)
{
    double result = Double.MaxValue;
    long count = _set.Count;
    IMLDataPair pair = BasicMLDataPair.CreatePair(
        _set.InputSize, _set.IdealSize);

    for (int i = 0; i < count; i++)
    {
        // read record i (the original mistakenly re-read the same record,
        // passing 'index' instead of the loop variable)
        _set.GetRecord(i, pair);
        result = Math.Min(result, pair.InputArray[index]);
    }

    return result;
}
/// <inheritdoc/>
public double Probability(IMLDataPair o)
{
    Matrix vmm = Matrix.CreateColumnMatrix(
        EngineArray.Subtract(o.Input, _mean));
    Matrix t = MatrixMath.Multiply(_covarianceInv, vmm);
    double expArg = MatrixMath.Multiply(MatrixMath.Transpose(vmm), t)[0, 0] * -0.5;
    return Math.Exp(expArg)
        / (Math.Pow(2.0 * Math.PI, _dimension / 2.0)
           * Math.Pow(_covarianceDet, 0.5));
}
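This evaluates the multivariate Gaussian density. With \mu = _mean, \Sigma^{-1} = _covarianceInv, |\Sigma| = _covarianceDet and d = _dimension:

p(o) = \frac{\exp\!\big(-\tfrac{1}{2}(o-\mu)^\top \Sigma^{-1}(o-\mu)\big)}{(2\pi)^{d/2}\,|\Sigma|^{1/2}}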
/// <summary>
/// Determine the probability of the specified data pair.
/// </summary>
/// <param name="o">The data pair.</param>
/// <returns>The probability.</returns>
public double Probability(IMLDataPair o)
{
    double result = 1;

    for (int i = 0; i < _probabilities.Length; i++)
    {
        if (o.Input[i] > (_probabilities[i].Length - 1))
        {
            throw new EncogError("Wrong observation value");
        }
        result *= _probabilities[i][(int)o.Input[i]];
    }

    return result;
}
// Decompiled (obfuscated) counterpart of the LevenbergMarquardtTraining
// constructor shown above; the field names are decompiler output, annotated
// with their apparent roles.
public LevenbergMarquardtTraining(BasicNetwork network, IMLDataSet training)
    : base(TrainingImplementationType.Iterative)
{
    ValidateNetwork.ValidateMethodToData(network, training);
    if (network.OutputCount != 1)
    {
        throw new TrainingError(
            "Levenberg Marquardt requires an output layer with a single neuron.");
    }

    this.Training = training;
    this._xb12276308f0fa6d9 = this.Training;                      // indexable training
    this._x87a7fc6a72741c2e = network;                            // network
    this._x8557b7ee760663f3 = (int)this._xb12276308f0fa6d9.Count; // training length
    this._xe2982b936ae423cd =
        this._x87a7fc6a72741c2e.Structure.CalculateSize();        // parameter count
    this._x05fb16197e552de6 = new Matrix(
        this._xe2982b936ae423cd, this._xe2982b936ae423cd);        // hessian matrix
    this._xc410e3804222557a = this._x05fb16197e552de6.Data;
    this._x6ad505c7ef981b0e = 0.0;                                // alpha
    this._xd7d571ecee49d1e4 = 1.0;                                // beta
    this._x3271cefb1a159639 = 0.1;                                // lambda

    this._xdadd8f92d75a3aba = new double[this._xe2982b936ae423cd]; // deltas
    this._x878c4eb3cef19a5a = new double[this._xe2982b936ae423cd]; // gradient
    this._x3cb63876dda4b74a = new double[this._xe2982b936ae423cd]; // diagonal

    BasicMLData input = new BasicMLData(this._xb12276308f0fa6d9.InputSize);
    BasicMLData ideal = new BasicMLData(this._xb12276308f0fa6d9.IdealSize);
    this._x61830ac74d65acc3 = new BasicMLDataPair(input, ideal);   // reusable pair
}
public void Add(IMLDataPair inputData)
{
    throw new TrainingError("Direct adds to the folded dataset are not supported.");
}
/// <inheritdoc/>
public double Probability(IMLDataPair o)
{
    Matrix vmm = Matrix.CreateColumnMatrix(
        EngineArray.Subtract(o.Input, _mean));
    Matrix t = MatrixMath.Multiply(_covarianceInv, vmm);
    double expArg = MatrixMath.Multiply(MatrixMath.Transpose(vmm), t)[0, 0] * -0.5;
    return Math.Exp(expArg)
        / (Math.Pow(2.0 * Math.PI, _dimension / 2.0)
           * Math.Pow(_covarianceDet, 0.5));
}
/// <summary>
/// Get a record.
/// </summary>
/// <param name="index">The index.</param>
/// <param name="pair">The record.</param>
public void GetRecord(int index, IMLDataPair pair)
{
    _underlying.GetRecord(CurrentFoldOffset + index, pair);
}
/// <summary>
/// Adding directly is not supported. Rather, add temporal points and
/// generate the training data.
/// </summary>
/// <param name="inputData">Not used.</param>
public sealed override void Add(IMLDataPair inputData)
{
    throw new TemporalError(AddNotSupported);
}
/// <summary>
/// Get the cluster for the specified data pair.
/// </summary>
/// <param name="o">The data pair to use.</param>
/// <returns>The cluster the pair is in.</returns>
public int Cluster(IMLDataPair o)
{
    return _clustersHash[o];
}
/// <summary>
/// Determine the probability of the specified data pair.
/// </summary>
/// <param name="o">The data pair.</param>
/// <returns>The probability.</returns>
public double Probability(IMLDataPair o)
{
    double result = 1;

    for (int i = 0; i < _probabilities.Length; i++)
    {
        if (o.Input[i] > (_probabilities[i].Length - 1))
        {
            throw new EncogError("Wrong observation value");
        }
        result *= _probabilities[i][(int)o.Input[i]];
    }

    return result;
}
/// <summary>
/// Put an object into the specified cluster.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="n">The cluster number.</param>
public void Put(IMLDataPair o, int n)
{
    _clustersHash[o] = n;
    _clusters[n].Add(o);
}
public IMLDataPair RestoreDataVector(IMLDataPair vectorToRestore)
{
    return this._x9a9fa564793616f5.RestoreDataVector(vectorToRestore);
}
public IMLDataPair ProcessDataVector(IMLDataPair vectorToProcess)
{
    return this._x9a9fa564793616f5.ProcessDataVector(vectorToProcess);
}
/// <summary>
/// The SSE error with the current weights.
/// </summary>
/// <returns>The sum-of-squares error.</returns>
private double CalculateError()
{
    var result = new ErrorCalculation();

    for (int i = 0; i < _trainingLength; i++)
    {
        _pair = _indexableTraining[i];
        IMLData actual = _network.Compute(_pair.Input);
        result.UpdateError(actual, _pair.Ideal, _pair.Significance);
    }

    return result.CalculateSSE();
}
/// <summary>
/// Construct the LMA object.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
/// <param name="h">The Hessian calculator to use.</param>
public LevenbergMarquardtTraining(BasicNetwork network,
    IMLDataSet training, IComputeHessian h)
    : base(TrainingImplementationType.Iterative)
{
    ValidateNetwork.ValidateMethodToData(network, training);

    Training = training;
    _indexableTraining = Training;
    _network = network;
    _trainingLength = (int)_indexableTraining.Count;
    _weightCount = _network.Structure.CalculateSize();
    _lambda = 0.1;
    _deltas = new double[_weightCount];
    _diagonal = new double[_weightCount];

    var input = new BasicMLData(_indexableTraining.InputSize);
    var ideal = new BasicMLData(_indexableTraining.IdealSize);
    _pair = new BasicMLDataPair(input, ideal);

    _hessian = h;
}
/// <summary>
/// Construct a gradient worker.
/// </summary>
/// <param name="theNetwork">The network to train.</param>
/// <param name="theOwner">The owner that is doing the training.</param>
/// <param name="theTraining">The training data.</param>
/// <param name="theLow">The low index to use in the training data.</param>
/// <param name="theHigh">The high index to use in the training data.</param>
/// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
/// <param name="ef">The error function to use.</param>
public GradientWorker(FlatNetwork theNetwork, TrainFlatNetworkProp theOwner,
    IMLDataSet theTraining, int theLow, int theHigh, double[] theFlatSpots,
    IErrorFunction ef)
{
    _errorCalculation = new ErrorCalculation();
    _network = theNetwork;
    _training = theTraining;
    _low = theLow;
    _high = theHigh;
    _owner = theOwner;
    _flatSpot = theFlatSpots;

    _layerDelta = new double[_network.LayerOutput.Length];
    _gradients = new double[_network.Weights.Length];
    _actual = new double[_network.OutputCount];

    _weights = _network.Weights;
    _layerIndex = _network.LayerIndex;
    _layerCounts = _network.LayerCounts;
    _weightIndex = _network.WeightIndex;
    _layerOutput = _network.LayerOutput;
    _layerSums = _network.LayerSums;
    _layerFeedCounts = _network.LayerFeedCounts;
    _ef = ef;

    _pair = BasicMLDataPair.CreatePair(_network.InputCount, _network.OutputCount);
}
/// <summary>
/// Remove an object from the specified cluster.
/// </summary>
/// <param name="o">The object to remove.</param>
/// <param name="n">The cluster to remove from.</param>
public void Remove(IMLDataPair o, int n)
{
    _clustersHash[o] = -1;
    _clusters[n].Remove(o);
}
private void ComputeStep(HiddenMarkovModel hmm, IMLDataPair o, int t, int j)
{
    double minDelta = Double.PositiveInfinity;
    int minPsy = 0;

    for (int i = 0; i < hmm.StateCount; i++)
    {
        double thisDelta = this.delta[t - 1][i]
            - Math.Log(hmm.TransitionProbability[i][j]);
        if (minDelta > thisDelta)
        {
            minDelta = thisDelta;
            minPsy = i;
        }
    }

    this.delta[t][j] = minDelta - Math.Log(hmm.StateDistributions[j].Probability(o));
    this.psy[t][j] = minPsy;
}
/// <summary>
/// Obtain the next pair.
/// </summary>
public void ObtainPair()
{
    // note: the return value of MoveNext is not checked, so _pair is
    // undefined once the iterator is exhausted
    _iterator.MoveNext();
    _pair = _iterator.Current;
}
// Decompiled (obfuscated) JacobianChainRule derivative calculation for one
// training pair. The names are decompiler output: _x87a7fc6a72741c2e is the
// network, _xbdeab667c25bbc32 the jacobian matrix (indexed by row
// _x4c51ad74d6bcc9e9 and column _x82d75873c9eb7116), and
// xd3eb00c1c38e3a49 / xbc17cb206c45d25e are activation-derivative helpers.
private double xddf5c75e1d743e26(IMLDataPair x61830ac74d65acc3)
{
    double sum = 0.0;

    this._x87a7fc6a72741c2e.Compute(x61830ac74d65acc3.Input);

    int fromLayer = this._x87a7fc6a72741c2e.LayerCount - 2;
    int toLayer = this._x87a7fc6a72741c2e.LayerCount - 1;
    int fromNeuronCount = this._x87a7fc6a72741c2e.GetLayerTotalNeuronCount(fromLayer);
    int toNeuronCount = this._x87a7fc6a72741c2e.GetLayerNeuronCount(toLayer);

    double output = this._x87a7fc6a72741c2e.Structure.Flat.LayerOutput[0];
    double e = x61830ac74d65acc3.Ideal[0] - output;

    // jacobian entries for the output layer
    for (int i = 0; i < fromNeuronCount; i++)
    {
        double lastOutput = this._x87a7fc6a72741c2e.GetLayerOutput(fromLayer, i);
        this._xbdeab667c25bbc32[this._x4c51ad74d6bcc9e9][this._x82d75873c9eb7116++] =
            xd3eb00c1c38e3a49(this._x87a7fc6a72741c2e.GetActivation(toLayer), output)
            * lastOutput;
    }

    // work backwards over the remaining layers
    while (fromLayer > 0)
    {
        fromLayer--;
        toLayer--;
        fromNeuronCount = this._x87a7fc6a72741c2e.GetLayerTotalNeuronCount(fromLayer);
        toNeuronCount = this._x87a7fc6a72741c2e.GetLayerNeuronCount(toLayer);

        for (int neuron = 0; neuron < toNeuronCount; neuron++)
        {
            double layerOutput = this._x87a7fc6a72741c2e.GetLayerOutput(toLayer, neuron);
            IActivationFunction activation = this._x87a7fc6a72741c2e.GetActivation(toLayer);
            double w = this._x87a7fc6a72741c2e.GetWeight(toLayer, neuron, 0);

            // as decompiled, 'sum' here still holds the value accumulated
            // for the previous neuron (0.0 on the first pass)
            double val = xd3eb00c1c38e3a49(activation, layerOutput)
                * xbc17cb206c45d25e(activation, sum) * w;

            for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++)
            {
                sum = 0.0;
                for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++)
                {
                    for (int k = 0; k < fromNeuronCount; k++)
                    {
                        sum += this._x87a7fc6a72741c2e.GetWeight(fromLayer, k, toNeuron)
                            * layerOutput;
                    }
                }

                this._xbdeab667c25bbc32[this._x4c51ad74d6bcc9e9][this._x82d75873c9eb7116++] =
                    val * this._x87a7fc6a72741c2e.GetLayerOutput(fromLayer, fromNeuron);
            }
        }
    }

    return e;
}
/// <summary>
/// Determine if the specified object is in one of the clusters.
/// </summary>
/// <param name="o">The object to check.</param>
/// <param name="x">The cluster.</param>
/// <returns>True if the object is in the cluster.</returns>
public bool IsInCluster(IMLDataPair o, int x)
{
    return Cluster(o) == x;
}
/// <summary>
/// Construct the LMA object.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
public LevenbergMarquardtTraining(BasicNetwork network, IMLDataSet training)
    : base(TrainingImplementationType.Iterative)
{
    ValidateNetwork.ValidateMethodToData(network, training);
    if (network.OutputCount != 1)
    {
        throw new TrainingError(
            "Levenberg Marquardt requires an output layer with a single neuron.");
    }

    Training = training;
    _indexableTraining = Training;
    _network = network;
    _trainingLength = (int)_indexableTraining.Count;
    _parametersLength = _network.Structure.CalculateSize();

    _hessianMatrix = new Matrix(_parametersLength, _parametersLength);
    _hessian = _hessianMatrix.Data;

    _alpha = 0.0d;
    _beta = 1.0d;
    _lambda = 0.1d;

    _deltas = new double[_parametersLength];
    _gradient = new double[_parametersLength];
    _diagonal = new double[_parametersLength];

    var input = new BasicMLData(_indexableTraining.InputSize);
    var ideal = new BasicMLData(_indexableTraining.IdealSize);
    _pair = new BasicMLDataPair(input, ideal);
}
public IMLDataPair RestoreDataVector(IMLDataPair vectorToProcess)
{
    return this.xd3764d4f1e921081.RestoreDataVector(vectorToProcess);
}
/// <summary>
/// Not supported.
/// </summary>
/// <param name="inputData">Not used.</param>
public void Add(IMLDataPair inputData)
{
    throw new TrainingError(AddNotSupported);
}
public void GetRecord(long index, IMLDataPair pair)
{
    this._x51176d6d4e8e34fa.GetRecord(this.CurrentFoldOffset + index, pair);
}
public IMLDataPair ProcessDataVector(IMLDataPair vectorToProcess)
{
    return new BasicMLDataPair(
        this.ProcessInputVector(vectorToProcess.Input),
        this.ProcessIdealVector(vectorToProcess.Ideal));
}
public IMLDataPair RestoreDataVector(IMLDataPair vectorToProcess)
{
    return vectorToProcess;
}
/// <summary>
/// Process one training set element.
/// </summary>
/// <param name="outputNeuron">The output neuron.</param>
/// <param name="derivative">The derivatives.</param>
/// <param name="pair">The training pair.</param>
private void Process(int outputNeuron, double[] derivative, IMLDataPair pair)
{
    _flat.Compute(pair.Input, _actual);

    double e = pair.Ideal[outputNeuron] - _actual[outputNeuron];
    _error += e * e;

    for (int i = 0; i < _actual.Length; i++)
    {
        if (i == outputNeuron)
        {
            _layerDelta[i] = _flat.ActivationFunctions[0]
                .DerivativeFunction(_layerSums[i], _layerOutput[i]);
        }
        else
        {
            _layerDelta[i] = 0;
        }
    }

    for (int i = _flat.BeginTraining; i < _flat.EndTraining; i++)
    {
        ProcessLevel(i, derivative);
    }

    // calculate gradients
    for (int j = 0; j < _weights.Length; j++)
    {
        _gradients[j] += e * derivative[j];
        _totDeriv[j] += derivative[j];
    }

    // update hessian
    for (int i = 0; i < _weightCount; i++)
    {
        for (int j = 0; j < _weightCount; j++)
        {
            _hessian[i][j] += derivative[i] * derivative[j];
        }
    }
}
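The final loop accumulates the Gauss-Newton approximation of the Hessian from the per-pattern derivatives, i.e. the outer product of one Jacobian row with itself:

H_{ij} \mathrel{+}= \frac{\partial y}{\partial w_i}\,\frac{\partial y}{\partial w_j},
\qquad H \approx J^\top J

summed over all training pairs (and, for multi-output networks, over output neurons).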
public IMLDataPair RestoreDataVector(IMLDataPair vectorToProcess)
{
    return new BasicMLDataPair(
        this.RestoreInputVector(vectorToProcess.Input),
        this.RestoreIdealVector(vectorToProcess.Ideal));
}