/// <summary>
/// Benchmark the in-memory dataset: count how many sequential record reads
/// complete within a fixed time budget, then record the scaled score.
/// </summary>
private void EvalMemory()
{
    BasicMLDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    // Fixed wall-clock budget of 10 seconds.
    const long stop = (10 * Evaluate.Milis);

    IMLDataPair pair = BasicMLDataPair.CreatePair(10, 10);
    var timer = new Stopwatch();
    timer.Start();

    int passes = 0;
    int index = 0;
    while (timer.ElapsedMilliseconds < stop)
    {
        passes++;
        // Read records round-robin, wrapping back to the start.
        training.GetRecord(index++, pair);
        if (index >= training.Count)
        {
            index = 0;
        }
    }

    // Scale the raw read count down to the reported score.
    passes /= 100000;
    _report.Report(Steps, Step2,
                   "Memory dataset, result: " + Format.FormatInteger(passes));
    _memoryScore = passes;
}
/// <summary>
/// Construct a gradient worker.
/// </summary>
/// <param name="theNetwork">The network to train.</param>
/// <param name="theOwner">The owner that is doing the training.</param>
/// <param name="theTraining">The training data.</param>
/// <param name="theLow">The low index to use in the training data.</param>
/// <param name="theHigh">The high index to use in the training data.</param>
/// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
/// <param name="ef">The error function to use.</param>
public GradientWorker(FlatNetwork theNetwork, Propagation theOwner,
                      IMLDataSet theTraining, int theLow, int theHigh,
                      double[] theFlatSpots, IErrorFunction ef)
{
    // Wiring: who we work for and what range of the data we own.
    _owner = theOwner;
    _network = theNetwork;
    _training = theTraining;
    _low = theLow;
    _high = theHigh;
    _flatSpot = theFlatSpots;
    _ef = ef;
    _errorCalculation = new ErrorCalculation();

    // Cache the flat network's structure arrays for fast access.
    _weights = _network.Weights;
    _layerIndex = _network.LayerIndex;
    _layerCounts = _network.LayerCounts;
    _weightIndex = _network.WeightIndex;
    _layerOutput = _network.LayerOutput;
    _layerSums = _network.LayerSums;
    _layerFeedCounts = _network.LayerFeedCounts;

    // Scratch buffers sized to the network.
    _layerDelta = new double[_network.LayerOutput.Length];
    _gradients = new double[_network.Weights.Length];
    _actual = new double[_network.OutputCount];
    _pair = BasicMLDataPair.CreatePair(_network.InputCount,
                                       _network.OutputCount);
}
/// <inheritdoc/>
public void Write(double[] input, double[] ideal, double significance)
{
    // Build a pair of the configured sizes and copy the raw arrays in.
    IMLDataPair row = BasicMLDataPair.CreatePair(_inputSize, _idealSize);
    EngineArray.ArrayCopy(input, row.Input.Data);
    EngineArray.ArrayCopy(ideal, row.Ideal.Data);
    row.Significance = significance;
    // NOTE(review): the constructed pair is never stored or forwarded, so
    // this method currently has no observable effect — confirm whether a
    // call to persist/add the pair is missing.
}
/// <summary>
/// Benchmark the disk-backed (binary EGB) dataset: copy random training data
/// to a temp file, count sequential reads within a fixed time budget, then
/// record the scaled score and remove the temp file.
/// </summary>
private void EvalBinary()
{
    FileInfo file = FileUtil.CombinePath(
        new FileInfo(Path.GetTempPath()), "temp.egb");

    BasicMLDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    // Create a fresh binary copy of the random training data.
    if (file.Exists)
    {
        file.Delete();
    }
    var buffered = new BufferedMLDataSet(file.ToString());
    buffered.Load(training);

    // Fixed wall-clock budget of 10 seconds.
    const long stop = (10 * Evaluate.Milis);
    IMLDataPair pair = BasicMLDataPair.CreatePair(10, 10);
    var timer = new Stopwatch();
    timer.Start();

    int passes = 0;
    int index = 0;
    while (timer.ElapsedMilliseconds < stop)
    {
        passes++;
        // Read records round-robin, wrapping back to the start.
        buffered.GetRecord(index++, pair);
        if (index >= buffered.Count)
        {
            index = 0;
        }
    }
    buffered.Close();

    // Scale the raw read count down to the reported score.
    passes /= 100000;
    _report.Report(Steps, Step3,
                   "Disk(binary) dataset, result: " + Format.FormatInteger(passes));

    // Clean up the temporary file.
    if (file.Exists)
    {
        file.Delete();
    }
    _binaryScore = passes;
}
/// <summary>
/// Move to the next record.
/// </summary>
/// <returns>True, if we were able to move to the next record.</returns>
public bool MoveNext()
{
    // No more data: clear the current pair and report failure.
    if (!HasNext())
    {
        _currentPair = null;
        return false;
    }

    IMLDataPair next = BasicMLDataPair.CreatePair(
        _owner.InputSize, _owner.IdealSize);
    _owner.GetRecord(_currentIndex++, next);
    _currentPair = next;
    return true;
}
/// <summary>
/// Get the minimum, over all the data, for the specified index.
/// </summary>
/// <param name="index">An index into the input data.</param>
/// <returns>The minimum value seen at that input index.</returns>
private double GetMinValue(int index)
{
    double result = Double.MaxValue;
    long count = _set.Count;
    IMLDataPair pair = BasicMLDataPair.CreatePair(
        _set.InputSize, _set.IdealSize);
    for (int i = 0; i < count; i++)
    {
        // BUG FIX: iterate over the records with the loop counter (i);
        // the original passed "index" and re-read the same record on
        // every pass, so the minimum was taken over a single record.
        _set.GetRecord(i, pair);
        result = Math.Min(result, pair.InputArray[index]);
    }
    return result;
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square(RMS).
/// </summary>
/// <param name="data">The training set.</param>
/// <returns>The error percentage.</returns>
public double CalculateError(IMLDataSet data)
{
    var calc = new ErrorCalculation();
    var output = new double[_outputCount];
    IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize,
                                                  data.IdealSize);

    // Accumulate the error over every record in the set.
    for (int row = 0; row < data.Count; row++)
    {
        data.GetRecord(row, pair);
        Compute(pair.InputArray, output);
        calc.UpdateError(output, pair.IdealArray, pair.Significance);
    }

    return calc.Calculate();
}
/// <summary>
/// Move to the next element.
/// </summary>
/// <returns>True if there are more elements to read.</returns>
public bool MoveNext()
{
    try
    {
        bool exhausted = _current >= _data.Count;
        if (exhausted)
        {
            return false;
        }

        _currentRecord = BasicMLDataPair.CreatePair(
            _data.InputSize, _data.IdealSize);
        _data.GetRecord(_current++, _currentRecord);
        return true;
    }
    catch (EndOfStreamException)
    {
        // Running off the end of the underlying stream means "no more data".
        return false;
    }
}
/// <summary>
/// Construct the chain rule worker.
/// </summary>
/// <param name="theNetwork">The network to calculate a Hessian for.</param>
/// <param name="theTraining">The training data.</param>
/// <param name="theLow">The low range.</param>
/// <param name="theHigh">The high range.</param>
public ChainRuleWorker(FlatNetwork theNetwork, IMLDataSet theTraining,
                       int theLow, int theHigh)
{
    // Wiring: the network, its data, and the record range this worker owns.
    _flat = theNetwork;
    _training = theTraining;
    _low = theLow;
    _high = theHigh;

    // Cache the flat network's structure arrays for fast access.
    _weights = _flat.Weights;
    _layerIndex = _flat.LayerIndex;
    _layerCounts = _flat.LayerCounts;
    _weightIndex = _flat.WeightIndex;
    _layerOutput = _flat.LayerOutput;
    _layerSums = _flat.LayerSums;
    _layerFeedCounts = _flat.LayerFeedCounts;

    // Scratch buffers: one slot per weight or per layer output.
    int weightCount = theNetwork.Weights.Length;
    _layerDelta = new double[_flat.LayerOutput.Length];
    _actual = new double[_flat.OutputCount];
    _derivative = new double[weightCount];
    _totDeriv = new double[weightCount];
    _gradients = new double[weightCount];

    _pair = BasicMLDataPair.CreatePair(_flat.InputCount, _flat.OutputCount);
}
/// <summary>
/// Compute the derivative for target data. As a side effect, accumulates
/// first and second derivative terms into _network.Deriv / _network.Deriv2
/// (indexed per input variable).
/// </summary>
/// <param name="input">The input.</param>
/// <param name="target">The target data.</param>
/// <returns>The output (classification mode only; null otherwise).</returns>
public IMLData ComputeDeriv(IMLData input, IMLData target)
{
    int pop, ivar;
    // NOTE(review): ibest is declared but never updated below, so the
    // classification return value is always 0 — confirm against the
    // upstream PNN implementation.
    int ibest = 0;
    int outvar;
    double dist, truedist;
    double vtot, wtot;
    double temp, der1, der2, psum;
    // vptr/wptr index per-output slices of _v/_w; vsptr/wsptr point at an
    // extra trailing slice used only by the non-classification modes.
    int vptr, wptr, vsptr = 0, wsptr = 0;
    var xout = new double[_network.OutputCount];

    // Zero the per-output accumulators.
    for (pop = 0; pop < _network.OutputCount; pop++)
    {
        xout[pop] = 0.0d;
        for (ivar = 0; ivar < _network.InputCount; ivar++)
        {
            _v[pop * _network.InputCount + ivar] = 0.0d;
            _w[pop * _network.InputCount + ivar] = 0.0d;
        }
    }
    psum = 0.0d;

    // Non-classification modes keep one extra accumulator slice past the
    // per-output slices; zero it as well.
    if (_network.OutputMode != PNNOutputMode.Classification)
    {
        vsptr = _network.OutputCount * _network.InputCount;
        wsptr = _network.OutputCount * _network.InputCount;
        for (ivar = 0; ivar < _network.InputCount; ivar++)
        {
            _v[vsptr + ivar] = 0.0d;
            _w[wsptr + ivar] = 0.0d;
        }
    }

    IMLDataPair pair = BasicMLDataPair.CreatePair(_network.Samples.InputSize,
        _network.Samples.IdealSize);

    // Accumulate kernel-weighted contributions from every training sample.
    for (int r = 0; r < _network.Samples.Count; r++)
    {
        _network.Samples.GetRecord(r, pair);

        // Skip the currently excluded (held-out) sample.
        if (r == _network.Exclude)
        {
            continue;
        }

        // Squared, sigma-scaled distance from the query point to this sample.
        dist = 0.0d;
        for (ivar = 0; ivar < _network.InputCount; ivar++)
        {
            double diff = input[ivar] - pair.Input[ivar];
            diff /= _network.Sigma[ivar];
            _dsqr[ivar] = diff * diff;
            dist += _dsqr[ivar];
        }

        // Apply the kernel function to turn distance into a weight.
        if (_network.Kernel == PNNKernelType.Gaussian)
        {
            dist = Math.Exp(-dist);
        }
        else if (_network.Kernel == PNNKernelType.Reciprocal)
        {
            dist = 1.0d / (1.0d + dist);
        }

        truedist = dist;
        // Clamp to avoid underflow to zero in later divisions.
        if (dist < 1.0e-40d)
        {
            dist = 1.0e-40d;
        }

        if (_network.OutputMode == PNNOutputMode.Classification)
        {
            // The first ideal value holds the class number for this sample.
            pop = (int)pair.Ideal[0];
            xout[pop] += dist;
            vptr = pop * _network.InputCount;
            wptr = pop * _network.InputCount;
            for (ivar = 0; ivar < _network.InputCount; ivar++)
            {
                temp = truedist * _dsqr[ivar];
                _v[vptr + ivar] += temp;
                _w[wptr + ivar] += temp * (2.0d * _dsqr[ivar] - 3.0d);
            }
        }
        else if (_network.OutputMode == PNNOutputMode.Unsupervised)
        {
            // Autoassociative case: the input itself acts as the target.
            for (ivar = 0; ivar < _network.InputCount; ivar++)
            {
                xout[ivar] += dist * pair.Input[ivar];
                temp = truedist * _dsqr[ivar];
                _v[vsptr + ivar] += temp;
                _w[wsptr + ivar] += temp * (2.0d * _dsqr[ivar] - 3.0d);
            }
            vptr = 0;
            wptr = 0;
            for (outvar = 0; outvar < _network.OutputCount; outvar++)
            {
                for (ivar = 0; ivar < _network.InputCount; ivar++)
                {
                    temp = truedist * _dsqr[ivar] * pair.Input[ivar];
                    _v[vptr++] += temp;
                    _w[wptr++] += temp * (2.0d * _dsqr[ivar] - 3.0d);
                }
            }
            psum += dist;
        }
        else if (_network.OutputMode == PNNOutputMode.Regression)
        {
            for (ivar = 0; ivar < _network.OutputCount; ivar++)
            {
                xout[ivar] += dist * pair.Ideal[ivar];
            }
            vptr = 0;
            wptr = 0;
            for (outvar = 0; outvar < _network.OutputCount; outvar++)
            {
                for (ivar = 0; ivar < _network.InputCount; ivar++)
                {
                    temp = truedist * _dsqr[ivar] * pair.Ideal[outvar];
                    _v[vptr++] += temp;
                    _w[wptr++] += temp * (2.0d * _dsqr[ivar] - 3.0d);
                }
            }
            for (ivar = 0; ivar < _network.InputCount; ivar++)
            {
                temp = truedist * _dsqr[ivar];
                _v[vsptr + ivar] += temp;
                _w[wsptr + ivar] += temp * (2.0d * _dsqr[ivar] - 3.0d);
            }
            psum += dist;
        }
    }

    if (_network.OutputMode == PNNOutputMode.Classification)
    {
        // Weight each class activation by its prior (negative prior means
        // "use raw counts") and total them for normalization.
        psum = 0.0d;
        for (pop = 0; pop < _network.OutputCount; pop++)
        {
            if (_network.Priors[pop] >= 0.0d)
            {
                xout[pop] *= _network.Priors[pop] / _network.CountPer[pop];
            }
            psum += xout[pop];
        }
        // Guard against division by (near) zero.
        if (psum < 1.0e-40d)
        {
            psum = 1.0e-40d;
        }
    }

    // Normalize the raw activations.
    for (pop = 0; pop < _network.OutputCount; pop++)
    {
        xout[pop] /= psum;
    }

    for (ivar = 0; ivar < _network.InputCount; ivar++)
    {
        if (_network.OutputMode == PNNOutputMode.Classification)
        {
            vtot = wtot = 0.0d;
        }
        else
        {
            vtot = _v[vsptr + ivar] * 2.0d / (psum * _network.Sigma[ivar]);
            wtot = _w[wsptr + ivar] * 2.0d
                   / (psum * _network.Sigma[ivar] * _network.Sigma[ivar]);
        }

        // Scale the per-output accumulators; in classification mode also
        // build the totals while scaling.
        for (outvar = 0; outvar < _network.OutputCount; outvar++)
        {
            if ((_network.OutputMode == PNNOutputMode.Classification)
                && (_network.Priors[outvar] >= 0.0d))
            {
                _v[outvar * _network.InputCount + ivar] *=
                    _network.Priors[outvar] / _network.CountPer[outvar];
                _w[outvar * _network.InputCount + ivar] *=
                    _network.Priors[outvar] / _network.CountPer[outvar];
            }
            _v[outvar * _network.InputCount + ivar] *=
                2.0d / (psum * _network.Sigma[ivar]);
            _w[outvar * _network.InputCount + ivar] *=
                2.0d / (psum * _network.Sigma[ivar] * _network.Sigma[ivar]);
            if (_network.OutputMode == PNNOutputMode.Classification)
            {
                vtot += _v[outvar * _network.InputCount + ivar];
                wtot += _w[outvar * _network.InputCount + ivar];
            }
        }

        // Fold the per-output first (der1) and second (der2) derivative
        // terms into the network's per-input accumulators.
        for (outvar = 0; outvar < _network.OutputCount; outvar++)
        {
            der1 = _v[outvar * _network.InputCount + ivar]
                   - xout[outvar] * vtot;
            der2 = _w[outvar * _network.InputCount + ivar]
                   + 2.0d * xout[outvar] * vtot * vtot
                   - 2.0d * _v[outvar * _network.InputCount + ivar] * vtot
                   - xout[outvar] * wtot;
            if (_network.OutputMode == PNNOutputMode.Classification)
            {
                // Squared-error derivative for a 1-of-N class target.
                if (outvar == (int)target[0])
                {
                    temp = 2.0d * (xout[outvar] - 1.0d);
                }
                else
                {
                    temp = 2.0d * xout[outvar];
                }
            }
            else
            {
                temp = 2.0d * (xout[outvar] - target[outvar]);
            }
            _network.Deriv[ivar] += temp * der1;
            _network.Deriv2[ivar] += temp * der2 + 2.0d * der1 * der1;
        }
    }

    if (_network.OutputMode == PNNOutputMode.Classification)
    {
        IMLData result = new BasicMLData(1);
        result[0] = ibest;
        return(result);
    }

    // Non-classification modes only update Deriv/Deriv2 as a side effect.
    return(null);
}
/// <summary>
/// Calculate the error for the entire training set.
/// </summary>
/// <param name="training">Training set to use.</param>
/// <param name="deriv">Should we find the derivative.</param>
/// <returns>The error.</returns>
public double CalculateError(IMLDataSet training, bool deriv)
{
    double totErr;
    double diff;
    totErr = 0.0d;

    if (deriv)
    {
        // Reset the derivative accumulators that ComputeDeriv adds into.
        int num = (_network.SeparateClass)
                      ? _network.InputCount * _network.OutputCount
                      : _network.InputCount;
        for (int i = 0; i < num; i++)
        {
            _network.Deriv[i] = 0.0d;
            _network.Deriv2[i] = 0.0d;
        }
    }

    // Exclude is counted down once per record so each evaluation skips one
    // training sample (ComputeDeriv skips the record whose index equals
    // Exclude) — a leave-one-out style pass.
    _network.Exclude = (int)training.Count;

    IMLDataPair pair = BasicMLDataPair.CreatePair(
        training.InputSize, training.IdealSize);

    var xout = new double[_network.OutputCount];

    for (int r = 0; r < training.Count; r++)
    {
        training.GetRecord(r, pair);
        _network.Exclude = _network.Exclude - 1;

        double err = 0.0d;

        IMLData input = pair.Input;
        IMLData target = pair.Ideal;

        if (_network.OutputMode == PNNOutputMode.Unsupervised)
        {
            if (deriv)
            {
                // NOTE(review): ComputeDeriv returns null for every mode
                // except Classification, so indexing "output" below looks
                // like it would fail here — confirm against upstream.
                IMLData output = ComputeDeriv(input, target);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            else
            {
                IMLData output = _network.Compute(input);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            // Autoassociative squared error: compare output to the input.
            for (int i = 0; i < _network.OutputCount; i++)
            {
                diff = input[i] - xout[i];
                err += diff * diff;
            }
        }
        else if (_network.OutputMode == PNNOutputMode.Classification)
        {
            // The first ideal value holds the true class number.
            var tclass = (int)target[0];
            IMLData output;

            if (deriv)
            {
                output = ComputeDeriv(input, pair.Ideal);
                //output_4.GetData(0); //**FIX**?
            }
            else
            {
                output = _network.Compute(input);
                //output_4.GetData(0); **FIX**?
            }
            xout[0] = output[0];

            // 1-of-N squared error against the true class.
            for (int i = 0; i < xout.Length; i++)
            {
                if (i == tclass)
                {
                    diff = 1.0d - xout[i];
                    err += diff * diff;
                }
                else
                {
                    err += xout[i] * xout[i];
                }
            }
        }
        else if (_network.OutputMode == PNNOutputMode.Regression)
        {
            // NOTE(review): both branches are identical here; by analogy
            // with the Unsupervised case the deriv branch would be expected
            // to differ, but ComputeDeriv returns null for Regression mode,
            // so calling it would fail — confirm intent against upstream.
            if (deriv)
            {
                IMLData output = _network.Compute(input);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            else
            {
                IMLData output = _network.Compute(input);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            for (int i = 0; i < _network.OutputCount; i++)
            {
                diff = target[i] - xout[i];
                err += diff * diff;
            }
        }

        totErr += err;
    }

    // Restore Exclude so later evaluations use the full sample set.
    _network.Exclude = -1;

    // Mean squared error over all records.
    _network.Error = totErr / training.Count;
    if (deriv)
    {
        for (int i = 0; i < _network.Deriv.Length; i++)
        {
            _network.Deriv[i] /= training.Count;
            _network.Deriv2[i] /= training.Count;
        }
    }

    // For the multi-output error modes, also average over the outputs.
    if ((_network.OutputMode == PNNOutputMode.Unsupervised)
        || (_network.OutputMode == PNNOutputMode.Regression))
    {
        _network.Error = _network.Error / _network.OutputCount;
        if (deriv)
        {
            for (int i = 0; i < _network.InputCount; i++)
            {
                _network.Deriv[i] /= _network.OutputCount;
                _network.Deriv2[i] /= _network.OutputCount;
            }
        }
    }

    return(_network.Error);
}