/// <summary>
/// Copy whatever dataset type is specified into a memory dataset.
/// </summary>
/// <param name="set">The dataset to copy.</param>
public BasicMLDataSet(IMLDataSet set)
{
    _data = new List<IMLDataPair>();
    int inputSize = set.InputSize;
    int idealSize = set.IdealSize;

    foreach (IMLDataPair sourcePair in set)
    {
        BasicMLData inputCopy = null;
        BasicMLData idealCopy = null;

        // Only allocate the portions that are actually present in the source.
        if (inputSize > 0)
        {
            inputCopy = new BasicMLData(inputSize);
            EngineArray.ArrayCopy(sourcePair.InputArray, inputCopy.Data);
        }

        if (idealSize > 0)
        {
            idealCopy = new BasicMLData(idealSize);
            EngineArray.ArrayCopy(sourcePair.IdealArray, idealCopy.Data);
        }

        Add(new BasicMLDataPair(inputCopy, idealCopy));
    }
}
/// <summary>
/// Clone this network's state into the flat network passed in.
/// </summary>
/// <param name="result">The network to copy into.</param>
public void CloneFlatNetwork(FlatNetwork result)
{
    // Scalar state.
    result._inputCount = _inputCount;
    result._outputCount = _outputCount;
    result._weightIndex = _weightIndex;
    result._beginTraining = _beginTraining;
    result._endTraining = _endTraining;

    // Layer structure arrays are deep-copied.
    result._layerCounts = EngineArray.ArrayCopy(_layerCounts);
    result._layerIndex = EngineArray.ArrayCopy(_layerIndex);
    result._layerOutput = EngineArray.ArrayCopy(_layerOutput);
    result._layerSums = EngineArray.ArrayCopy(_layerSums);
    result._layerFeedCounts = EngineArray.ArrayCopy(_layerFeedCounts);
    result._contextTargetOffset = EngineArray.ArrayCopy(_contextTargetOffset);
    result._contextTargetSize = EngineArray.ArrayCopy(_contextTargetSize);
    result._layerContextCount = EngineArray.ArrayCopy(_layerContextCount);
    result._biasActivation = EngineArray.ArrayCopy(_biasActivation);

    // The weight array is shared by reference, not copied.
    result._weights = _weights;

    // Activation functions are cloned individually.
    result._activationFunctions =
        new IActivationFunction[_activationFunctions.Length];
    for (int index = 0; index < result._activationFunctions.Length; index++)
    {
        result._activationFunctions[index] =
            (IActivationFunction) _activationFunctions[index].Clone();
    }
}
/// <summary>
/// Resume training.
/// </summary>
/// <param name="state">The training state to return to.</param>
public override void Resume(TrainingContinuation state)
{
    // Refuse to resume from continuation data whose lengths do not match.
    if (!IsValidResume(state))
    {
        throw new TrainingError("Invalid training resume data length");
    }

    // Recover the persisted RPROP per-weight state arrays.
    double[] lastGradient = (double[])state
        [ResilientPropagation.LAST_GRADIENTS];
    double[] updateValues = (double[])state
        [ResilientPropagation.UPDATE_VALUES];

    // Restore the state into whichever flat trainer implementation is in use.
    if (this.FlatTraining is TrainFlatNetworkResilient)
    {
        EngineArray.ArrayCopy(lastGradient,
                              ((TrainFlatNetworkResilient)this.FlatTraining)
                              .LastGradient);
        EngineArray.ArrayCopy(updateValues,
                              ((TrainFlatNetworkResilient)this.FlatTraining)
                              .UpdateValues);
    }
#if !SILVERLIGHT
    // The OpenCL-backed trainer is compiled out on Silverlight builds.
    else if (this.FlatTraining is TrainFlatNetworkOpenCL)
    {
        EngineArray.ArrayCopy(lastGradient, ((TrainFlatNetworkOpenCL)this
                              .FlatTraining).LastGradient);
        EngineArray.ArrayCopy(updateValues, ((TrainFlatNetworkOpenCL)this
                              .FlatTraining).UpdateValues);
    }
#endif
}
/// <summary>
/// Perform one training iteration: compute gradients, apply the learning
/// rule, then broadcast the updated weights to every worker.
/// </summary>
public virtual void Iteration()
{
    this.iteration++;

    CalculateGradients();

    if (this.network.Limited)
    {
        LearnLimited();
    }
    else
    {
        Learn();
    }

    // Push the freshly updated weights out to each gradient worker.
    int weightCount = this.network.Weights.Length;
    foreach (IFlatGradientWorker worker in this.workers)
    {
        EngineArray.ArrayCopy(this.network.Weights, 0,
                              worker.Weights, 0, weightCount);
    }

    CopyContexts();

    // Surface any exception that was captured on a worker thread.
    if (this.reportedException != null)
    {
        throw (new EncogEngineError(this.reportedException));
    }
}
/// <summary>
/// Construct the SCG (scaled conjugate gradient) training object.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
public TrainFlatNetworkSCG(FlatNetwork network, IEngineDataSet training)
    : base(network, training)
{
    // Fixed: the original assigned this.success twice in a row.
    this.success = true;
    this.delta = 0;
    this.lambda2 = 0;
    this.lambda = TrainFlatNetworkSCG.FIRST_LAMBDA;
    this.oldError = 0;
    this.magP = 0;
    this.restart = false;

    // Train against a private copy of the network's weight vector.
    this.weights = EngineArray.ArrayCopy(network.Weights);
    int numWeights = this.weights.Length;

    // Per-weight SCG state vectors.
    this.oldWeights = new double[numWeights];
    this.oldGradient = new double[numWeights];
    this.p = new double[numWeights];
    this.r = new double[numWeights];

    this.shouldInit = true;
}
/// <summary>
/// Called internally to read a large array that spans multiple lines.
/// </summary>
/// <param name="line">The line containing the beginning of a large array.</param>
/// <returns>The array read.</returns>
private double[] ReadLargeArray(String line)
{
    // The declared element count follows the first nine header characters.
    int declaredLength = int.Parse(line.Substring(9));
    double[] result = new double[declaredLength];

    int index = 0;
    String current;
    while ((current = this.reader.ReadLine()) != null)
    {
        current = current.Trim();

        if (current.StartsWith("//"))
        {
            // Skip comment lines embedded in the array data.
            continue;
        }
        if (current.StartsWith("##end"))
        {
            // Terminator for the array section.
            break;
        }

        // Parse this line's values and append them to the result.
        double[] chunk = NumberList.FromList(CSVFormat.EgFormat, current);
        EngineArray.ArrayCopy(chunk, 0, result, index, chunk.Length);
        index += chunk.Length;
    }

    return result;
}
/// <summary>
/// Clone this network's state into the target flat network.
/// </summary>
/// <param name="result">The cloned flat network.</param>
public void CloneFlatNetwork(FlatNetwork result)
{
    // Scalar state.
    result.inputCount = this.inputCount;
    result.outputCount = this.outputCount;
    result.weightIndex = this.weightIndex;
    result.beginTraining = this.beginTraining;
    result.endTraining = this.endTraining;

    // Layer structure arrays are deep-copied.
    result.layerCounts = EngineArray.ArrayCopy(this.layerCounts);
    result.layerIndex = EngineArray.ArrayCopy(this.layerIndex);
    result.layerOutput = EngineArray.ArrayCopy(this.layerOutput);
    result.layerFeedCounts = EngineArray.ArrayCopy(this.layerFeedCounts);
    result.contextTargetOffset = EngineArray.ArrayCopy(this.contextTargetOffset);
    result.contextTargetSize = EngineArray.ArrayCopy(this.contextTargetSize);
    result.layerContextCount = EngineArray.ArrayCopy(this.layerContextCount);
    result.biasActivation = EngineArray.ArrayCopy(this.biasActivation);

    // The weight array is shared by reference, not copied.
    result.weights = this.weights;

    // Activation functions are cloned individually.
    result.activationFunctions =
        new IActivationFunction[this.activationFunctions.Length];
    for (int index = 0; index < result.activationFunctions.Length; index++)
    {
        result.activationFunctions[index] =
            (IActivationFunction) this.activationFunctions[index].Clone();
    }
}
/// <summary>
/// Perform one training iteration: compute gradients, apply the learning
/// rule, then distribute the updated weights to every worker.
/// </summary>
public virtual void Iteration()
{
    _iteration++;

    CalculateGradients();

    // Apply the learning rule, honoring any weight limit.
    if (_network.Limited)
    {
        LearnLimited();
    }
    else
    {
        Learn();
    }

    // Push the updated weights out to each gradient worker.
    int weightCount = _network.Weights.Length;
    foreach (GradientWorker worker in _workers)
    {
        EngineArray.ArrayCopy(_network.Weights, 0,
                              worker.Weights, 0, weightCount);
    }

    // Context neurons only need copying when the network has them.
    if (_network.HasContext)
    {
        CopyContexts();
    }

    // Surface any exception that was captured on a worker thread.
    if (_reportedException != null)
    {
        throw (new EncogError(_reportedException));
    }
}
/// <summary>
/// Compute the network output for a raw input array, writing the result
/// into the supplied output array.
/// </summary>
/// <param name="input">The raw input values.</param>
/// <param name="output">Receives the computed output values.</param>
public void Compute(double[] input, double[] output)
{
    var wrapped = new BasicNeuralData(input);
    INeuralData computed = this.Compute(wrapped);
    EngineArray.ArrayCopy(computed.Data, output);
}
/// <summary>
/// Compute the output for this network.
/// </summary>
/// <param name="input">The input.</param>
/// <param name="output">Receives the computed output.</param>
public void Compute(double[] input, double[] output)
{
    // Wrap the raw array, run the network, and unwrap the result.
    IMLData computed = Compute(new BasicMLData(input));
    EngineArray.ArrayCopy(computed.Data, output);
}
/// <summary>
/// Unflatten the weights: copy the flat network weights back into the
/// neural network weight matrixes.
/// </summary>
public void UnflattenWeights()
{
    // Nothing to do when no flat network exists.
    if (flat == null)
    {
        return;
    }

    NetworkCODEC.ArrayToNetwork(flat.Weights, network);
    this.flatUpdate = FlatUpdateNeeded.None;

    // Restore each context layer's state from the flat layer output.
    foreach (ILayer layer in this.layers)
    {
        ContextLayer context = layer as ContextLayer;
        if (context != null && context.FlatContextIndex != -1)
        {
            EngineArray.ArrayCopy(
                this.flat.LayerOutput,
                context.FlatContextIndex,
                context.Context.Data,
                0,
                context.Context.Count);
        }
    }
}
/// <summary>
/// Determine the class using part of an array.
/// </summary>
/// <param name="pos">The position to begin.</param>
/// <param name="data">The array to check.</param>
/// <returns>The class item, or null when the decoded index is negative.</returns>
public ClassItem DetermineClass(int pos, double[] data)
{
    // Extract just the columns that belong to this field.
    var slice = new double[ColumnsNeeded];
    EngineArray.ArrayCopy(data, pos, slice, 0, slice.Length);

    int resultIndex;
    switch (_action)
    {
        case NormalizationAction.Equilateral:
            resultIndex = _eq.Decode(slice);
            break;
        case NormalizationAction.OneOf:
            resultIndex = EngineArray.IndexOfLargest(slice);
            break;
        case NormalizationAction.SingleField:
            resultIndex = (int) slice[0];
            break;
        default:
            throw new AnalystError("Invalid action: " + _action);
    }

    // A negative index means no class could be determined.
    return resultIndex < 0 ? null : _classes[resultIndex];
}
/// <summary>
/// Copy another genome's data and scores into this genome.
/// </summary>
/// <param name="source">The genome to copy from.</param>
public override void Copy(IGenome source)
{
    var other = (DoubleArrayGenome) source;
    EngineArray.ArrayCopy(other.data, this.data);

    // Carry both scores across as well.
    Score = source.Score;
    AdjustedScore = source.AdjustedScore;
}
/// <inheritdoc/>
public void Write(double[] input, double[] ideal)
{
    // Build a data pair sized to this writer's configured dimensions and
    // copy the caller's values into it.
    INeuralDataPair pair = BasicNeuralDataPair.CreatePair(inputSize, idealSize);
    EngineArray.ArrayCopy(input, pair.Input.Data);
    EngineArray.ArrayCopy(ideal, pair.Ideal.Data);
    // NOTE(review): the populated pair is never stored or forwarded within
    // this method — confirm the enclosing class persists it elsewhere, or
    // whether an Add/write call is missing here.
}
/// <summary>
/// Add a bar, keeping at most "periods" bars in the window.
/// </summary>
/// <param name="d">The bar data.</param>
public void Add(double[] d)
{
    // The newest bar goes to the front of the list (as a defensive copy).
    data.Insert(0, EngineArray.ArrayCopy(d));

    // Drop the oldest bar once the window exceeds its capacity.
    if (data.Count > periods)
    {
        int last = data.Count - 1;
        data.RemoveAt(last);
    }
}
/// <summary>
/// Copy another genome's data and scores into this genome.
/// </summary>
/// <param name="source">The genome to copy from.</param>
public override void Copy(IGenome source)
{
    var other = (IntegerArrayGenome) source;
    EngineArray.ArrayCopy(other.data, this.data);

    // Carry both scores across as well.
    this.Score = source.Score;
    this.AdjustedScore = source.AdjustedScore;
}
/// <summary>
/// Construct a continuous distribution.
/// </summary>
/// <param name="mean">The mean.</param>
/// <param name="covariance">The covariance.</param>
public ContinousDistribution(double[] mean, double[][] covariance)
{
    // Dimensionality follows the covariance matrix.
    _dimension = covariance.Length;

    // Keep a defensive copy of the mean vector.
    _mean = EngineArray.ArrayCopy(mean);
    _covariance = new Matrix(covariance);

    // Derive any cached state from the covariance up front.
    Update(covariance);
}
/// <summary>
/// Normalize the sunspot data into the given range and seed the
/// closed-loop series as a copy of the normalized values.
/// </summary>
/// <param name="lo">The normalized low.</param>
/// <param name="hi">The normalized high.</param>
public static void NormalizeSunspots(double lo, double hi)
{
    var norm = new NormalizeArray
    {
        NormalizedHigh = hi,
        NormalizedLow = lo
    };

    _normalizedSunspots = norm.Process(SUNSPOTS);
    _closedLoopSunspots = EngineArray.ArrayCopy(_normalizedSunspots);
}
/// <inheritdoc/>
public void Write(double[] input, double[] ideal, double significance)
{
    // Build a data pair sized to this writer's configured dimensions and
    // copy the caller's values and significance into it.
    IMLDataPair pair = BasicMLDataPair.CreatePair(_inputSize, _idealSize);
    EngineArray.ArrayCopy(input, pair.Input.Data);
    EngineArray.ArrayCopy(ideal, pair.Ideal.Data);
    pair.Significance = significance;
    // NOTE(review): the populated pair is never stored or forwarded within
    // this method — confirm the enclosing class persists it elsewhere, or
    // whether an Add/write call is missing here.
}
/// <summary>
/// Normalize the forex pair data into the given range and seed the
/// closed-loop series as a copy of the normalized values.
/// </summary>
/// <param name="lo">The normalized low.</param>
/// <param name="hi">The normalized high.</param>
public void NormalizeForexPair(double lo, double hi)
{
    array = new NormalizeArray
    {
        NormalizedLow = lo,
        NormalizedHigh = hi
    };

    // Normalize, then start the closed-loop series as a copy of it.
    _normalizedForexPair = array.Process(ForexPair);
    _closedLoopForexPair = EngineArray.ArrayCopy(_normalizedForexPair);
}
/// <summary>
/// Note: for Boltzmann networks, you will usually want to call the "run"
/// method to compute the output.
/// This method copies the input pattern into the current state, runs a
/// single iteration, and returns the resulting current state.
/// </summary>
/// <param name="input">The input pattern.</param>
/// <returns>The new current state.</returns>
public override sealed IMLData Compute(IMLData input)
{
    // Seed the current state from the input pattern.
    input.CopyTo(CurrentState.Data, 0, input.Count);

    // Advance the network one iteration.
    Run();

    // Snapshot the resulting state for the caller.
    var result = new BiPolarMLData(input.Count);
    EngineArray.ArrayCopy(CurrentState.Data, result.Data);
    return result;
}
/// <summary>
/// Calculate the output for the given input.
/// </summary>
/// <param name="input">The input.</param>
/// <param name="output">Output will be placed here.</param>
public virtual void Compute(double[] input, double[] output)
{
    // Copy the input values into the tail of the layer-output array
    // (the region sized by the last layer count). Renamed from the
    // original "sourceIndex": this is the destination offset of the copy.
    int inputOffset =
        _layerOutput.Length - _layerCounts[_layerCounts.Length - 1];
    EngineArray.ArrayCopy(input, 0, _layerOutput, inputOffset, _inputCount);

    InnerCompute(output);
}
/// <summary>
/// Write one record of data to a CSV file.
/// </summary>
/// <param name="input">The input data array.</param>
/// <param name="ideal">The ideal data array.</param>
public void Write(double[] input, double[] ideal)
{
    // Concatenate input followed by ideal into a single record.
    var record = new double[input.Length + ideal.Length];
    EngineArray.ArrayCopy(input, record);
    EngineArray.ArrayCopy(ideal, 0, record, input.Length, ideal.Length);

    // Format the record with this writer's number format and emit it.
    var line = new StringBuilder();
    NumberList.ToList(this.format, line, record);
    this.output.WriteLine(line.ToString());
}
/// <summary>
/// Normalize the sunspot data into the given range and initialize the
/// closed-loop series as a copy of the normalized values.
/// </summary>
/// <param name="lo">The normalized low.</param>
/// <param name="hi">The normalized high.</param>
public void NormalizeSunspots(double lo, double hi)
{
    var norm = new NormalizeArray
    {
        NormalizedLow = lo,
        NormalizedHigh = hi
    };

    _normalizedSunspots = norm.Process(Sunspots);
    _closedLoopSunspots = EngineArray.ArrayCopy(_normalizedSunspots);
}
/// <summary>
/// Perform the requested number of training iterations using the OpenCL
/// kernel, batching the work across one or more kernel calls.
/// </summary>
/// <param name="iterations">The number of iterations to perform.</param>
public virtual void Iteration(int iterations)
{
    // A learning method must be selected before any iteration can run.
    if (this.learningType == -1)
    {
        throw new EncogEngineError(
            "Learning type has not been defined yet, you must first call one of the learnXXXX methods, such as learnRPROP.");
    }

    this.iteration += iterations;
    int currentIndex = 0;
    this.error = 0;

    int count = this.profile.KernelNumberOfCalls;

    // If the OpenCL ratio is not 1.0, a single training iteration is
    // being broken up across kernel calls, so batching multiple
    // iterations together is not supported.
    if (count > 0 && iterations > 1)
    {
        throw new EncogEngineError(
            "Must use an OpenCL ratio of 1.0 if you are going to use an iteration count > 1.");
    }

    this.kernel.GlobalWork = this.profile.KernelGlobalWorkgroup;
    this.kernel.LocalWork = this.profile.KernelLocalWorkgroup;

    // Handle the full-size workloads.
    while (count > 0)
    {
        CallKernel(currentIndex, this.profile.KernelWorkPerCall, false, 1);
        count--;
        currentIndex += (int)(this.profile.KernelWorkPerCall * this.kernel.GlobalWork);
    }

    // Handle the final (remainder) workload.
    // NOTE(review): LocalWork is set to KernelRemainderGlobal here, the
    // same value as GlobalWork — confirm this is intended rather than a
    // remainder-local workgroup size.
    this.kernel.GlobalWork = this.profile.KernelRemainderGlobal;
    this.kernel.LocalWork = this.profile.KernelRemainderGlobal;
    CallKernel(currentIndex, this.profile.KernelRemainderPer, true, iterations);

    // Average the accumulated error over every ideal value produced.
    count = (int)this.training.Count;
    this.error = this.error / (count * this.training.IdealSize);

    if (Util.ErrorCalculation.Mode == Util.ErrorCalculationMode.RMS)
    {
        this.error = Math.Sqrt(this.error);
    }

    // Pull the trained weights back from the device into the network.
    EngineArray.ArrayCopy(this.kernel.WeightOutArray, this.network.Weights);
}
/// <summary>
/// Normalize the sunspot data into the requested range and start the
/// closed-loop series as a copy of the normalized values.
/// </summary>
/// <param name="lo">The normalized low.</param>
/// <param name="hi">The normalized high.</param>
public static void normalizeSunspots(double lo, double hi)
{
    var norm = new NormalizeArray
    {
        NormalizedLow = lo,
        NormalizedHigh = hi
    };

    // create arrays to hold the normalized sunspots
    normalizedSunspots = norm.Process(SUNSPOTS);
    closedLoopSunspots = EngineArray.ArrayCopy(normalizedSunspots);
}
/// <summary>
/// Resume training from a saved continuation.
/// </summary>
/// <param name="state">The training state to return to.</param>
public override sealed void Resume(TrainingContinuation state)
{
    // Reject continuation data whose stored lengths do not match.
    if (!IsValidResume(state))
    {
        throw new TrainingError("Invalid training resume data length");
    }

    // Restore the saved per-weight RPROP state.
    var savedGradients = (double[]) state.Get(LastGradientsConst);
    var savedUpdates = (double[]) state.Get(UpdateValuesConst);
    EngineArray.ArrayCopy(savedGradients, LastGradient);
    EngineArray.ArrayCopy(savedUpdates, _updateValues);
}
/// <summary>
/// Read one input/ideal record from the backing arrays.
/// </summary>
/// <param name="input">Receives the input values.</param>
/// <param name="ideal">Receives the ideal values.</param>
/// <param name="significance">Receives the record significance (always 1.0).</param>
/// <returns>False once all records have been consumed, otherwise true.</returns>
public bool Read(double[] input, double[] ideal, ref double significance)
{
    // Stop once the cursor has passed the last record.
    if (_index >= _input.Length)
    {
        return false;
    }

    EngineArray.ArrayCopy(_input[_index], input);
    EngineArray.ArrayCopy(_ideal[_index], ideal);
    significance = 1.0;

    _index++;
    return true;
}
/// <summary>
/// Resume training from a saved continuation.
/// </summary>
/// <param name="state">The training state to return to.</param>
public override void Resume(TrainingContinuation state)
{
    // Reject continuation data whose stored lengths do not match.
    if (!IsValidResume(state))
    {
        throw new TrainingError("Invalid training resume data length");
    }

    // Restore the saved per-weight gradients.
    var savedGradients = (double[]) state.Contents[LastGradients];
    EngineArray.ArrayCopy(savedGradients, LastGradient);
}
/// <summary>
/// Create a copy of this hidden Markov model: the Pi vector, the
/// transition probabilities, and each state distribution are duplicated.
/// </summary>
object ICloneable.Clone()
{
    var copy = new HiddenMarkovModel(StateCount);

    EngineArray.ArrayCopy(Pi, copy.Pi);
    EngineArray.ArrayCopy(TransitionProbability, copy.TransitionProbability);

    // Clone every per-state distribution individually.
    for (int state = 0; state < StateCount; state++)
    {
        copy.StateDistributions[state] = StateDistributions[state].Clone();
    }

    return copy;
}