// Storable constructor: ALGLIB's mlpensemble is not itself storable, so an empty
// instance is allocated on deserialization (its weights are restored elsewhere).
private NeuralNetworkEnsembleModel(bool deserializing)
  : base(deserializing) {
  if (deserializing) {
    mlpEnsemble = new alglib.mlpensemble();
  }
}
// Wraps a trained ALGLIB MLP ensemble as a model.
// classValues is null for regression; for classification it maps the network's
// class indices back to the original class values (defensively cloned).
public NeuralNetworkEnsembleModel(alglib.mlpensemble mlpEnsemble, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
  : base() {
  this.name = ItemName;
  this.description = ItemDescription;
  this.mlpEnsemble = mlpEnsemble;
  this.targetVariable = targetVariable;
  this.allowedInputVariables = allowedInputVariables.ToArray();
  if (classValues != null) {
    this.classValues = (double[])classValues.Clone();
  }
}
// Cloning constructor: ALGLIB's mlpensemble cannot be cloned directly, so it is
// deep-copied via a serialize/deserialize round trip.
private NeuralNetworkEnsembleModel(NeuralNetworkEnsembleModel original, Cloner cloner)
  : base(original, cloner) {
  // mlpeunserialize assigns its out parameter, so pre-allocating an
  // alglib.mlpensemble here would be a dead store and is omitted.
  string serializedEnsemble;
  alglib.mlpeserialize(original.mlpEnsemble, out serializedEnsemble);
  alglib.mlpeunserialize(serializedEnsemble, out this.mlpEnsemble);
  targetVariable = original.targetVariable;
  allowedInputVariables = (string[])original.allowedInputVariables.Clone();
  if (original.classValues != null) {
    this.classValues = (double[])original.classValues.Clone();
  }
}
// Creates a model around an already-trained ALGLIB ensemble.
// For classification, classValues translates network output indices back to the
// original class values; it is cloned so the caller's array stays independent.
public NeuralNetworkEnsembleModel(alglib.mlpensemble mlpEnsemble, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
  : base() {
  name = ItemName;
  description = ItemDescription;
  this.mlpEnsemble = mlpEnsemble;
  this.targetVariable = targetVariable;
  this.allowedInputVariables = allowedInputVariables.ToArray();
  if (classValues != null) {
    this.classValues = (double[])classValues.Clone();
  }
}
// Cloning constructor. The ALGLIB ensemble offers no copy API, so the deep copy
// goes through mlpeserialize/mlpeunserialize.
private NeuralNetworkEnsembleModel(NeuralNetworkEnsembleModel original, Cloner cloner)
  : base(original, cloner) {
  // No need to new up an mlpensemble first: mlpeunserialize must assign its
  // out parameter, so the previous pre-allocation was a dead store.
  string serializedEnsemble;
  alglib.mlpeserialize(original.mlpEnsemble, out serializedEnsemble);
  alglib.mlpeunserialize(serializedEnsemble, out this.mlpEnsemble);
  targetVariable = original.targetVariable;
  allowedInputVariables = (string[])original.allowedInputVariables.Clone();
  if (original.classValues != null) {
    this.classValues = (double[])original.classValues.Clone();
  }
}
// Trains an MLP ensemble on the problem's training partition and wraps it in a
// regression solution. nLayers selects the topology (0, 1, or 2 hidden layers);
// rmsError / avgRelError report the training-set errors of the fitted ensemble.
public static IRegressionSolution CreateNeuralNetworkEnsembleRegressionSolution(IRegressionProblemData problemData, int ensembleSize, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts, out double rmsError, out double avgRelError) {
  var dataset = problemData.Dataset;
  string targetVariable = problemData.TargetVariable;
  IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
  IEnumerable<int> rows = problemData.TrainingIndices;

  // Input matrix layout: one column per input variable, target in the last column.
  double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
  if (inputMatrix.ContainsNanOrInfinity()) {
    throw new NotSupportedException("Neural network ensemble regression does not support NaN or infinity values in the input dataset.");
  }

  // Build the ensemble with the requested number of hidden layers.
  alglib.mlpensemble mlpEnsemble = null;
  switch (nLayers) {
    case 0:
      alglib.mlpecreate0(allowedInputVariables.Count(), 1, ensembleSize, out mlpEnsemble);
      break;
    case 1:
      alglib.mlpecreate1(allowedInputVariables.Count(), nHiddenNodes1, 1, ensembleSize, out mlpEnsemble);
      break;
    case 2:
      alglib.mlpecreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, ensembleSize, out mlpEnsemble);
      break;
    default:
      throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
  }

  int nRows = inputMatrix.GetLength(0);
  alglib.mlpreport rep;
  int info;
  // info == 6 signals successful early-stopping training in ALGLIB.
  alglib.mlpetraines(mlpEnsemble, inputMatrix, nRows, decay, restarts, out info, out rep);
  if (info != 6) {
    throw new ArgumentException("Error in calculation of neural network ensemble regression solution");
  }

  rmsError = alglib.mlpermserror(mlpEnsemble, inputMatrix, nRows);
  avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows);
  return new NeuralNetworkEnsembleRegressionSolution(new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables), (IRegressionProblemData)problemData.Clone());
}
// Storable constructor; allocates an empty ALGLIB ensemble when deserializing,
// since alglib.mlpensemble is not persisted directly.
private NeuralNetworkEnsembleModel(bool deserializing)
  : base(deserializing) {
  if (deserializing) {
    mlpEnsemble = new alglib.mlpensemble();
  }
}
// Trains an MLP classification ensemble on the training partition and wraps it
// in a classification solution. nLayers selects the topology (0, 1, or 2 hidden
// layers); rmsError / avgRelError / relClassError report training-set errors.
public static IClassificationSolution CreateNeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, int ensembleSize, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts, out double rmsError, out double avgRelError, out double relClassError) {
  var dataset = problemData.Dataset;
  string targetVariable = problemData.TargetVariable;
  IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
  IEnumerable<int> rows = problemData.TrainingIndices;

  // Input matrix layout: one column per input variable, target in the last column.
  double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
  // Use the same NaN/Inf guard as the regression factory for consistency.
  if (inputMatrix.ContainsNanOrInfinity()) {
    throw new NotSupportedException("Neural network ensemble classification does not support NaN or infinity values in the input dataset.");
  }

  int nRows = inputMatrix.GetLength(0);
  int nFeatures = inputMatrix.GetLength(1) - 1;
  double[] classValues = dataset.GetDoubleValues(targetVariable).Distinct().OrderBy(x => x).ToArray();
  int nClasses = classValues.Length; // array property, not LINQ Count()

  // Map original class values to indices [0..nClasses-1], as ALGLIB's
  // classification trainer expects zero-based class indices in the target column.
  Dictionary<double, double> classIndices = new Dictionary<double, double>();
  for (int i = 0; i < nClasses; i++) {
    classIndices[classValues[i]] = i;
  }
  for (int row = 0; row < nRows; row++) {
    inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
  }

  // Build the ensemble with the requested number of hidden layers.
  alglib.mlpensemble mlpEnsemble = null;
  if (nLayers == 0) {
    alglib.mlpecreatec0(allowedInputVariables.Count(), nClasses, ensembleSize, out mlpEnsemble);
  } else if (nLayers == 1) {
    alglib.mlpecreatec1(allowedInputVariables.Count(), nHiddenNodes1, nClasses, ensembleSize, out mlpEnsemble);
  } else if (nLayers == 2) {
    alglib.mlpecreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, ensembleSize, out mlpEnsemble);
  } else {
    throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
  }

  alglib.mlpreport rep;
  int info;
  // info == 6 signals successful early-stopping training in ALGLIB.
  alglib.mlpetraines(mlpEnsemble, inputMatrix, nRows, decay, restarts, out info, out rep);
  if (info != 6) {
    throw new ArgumentException("Error in calculation of neural network ensemble classification solution");
  }

  rmsError = alglib.mlpermserror(mlpEnsemble, inputMatrix, nRows);
  avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows);
  relClassError = alglib.mlperelclserror(mlpEnsemble, inputMatrix, nRows);
  var problemDataClone = (IClassificationProblemData)problemData.Clone();
  return new NeuralNetworkEnsembleClassificationSolution(new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()), problemDataClone);
}
// Storable constructor (StorableConstructorFlag pattern): the ALGLIB ensemble is
// not persisted directly, so a fresh instance is allocated on deserialization.
private NeuralNetworkEnsembleModel(StorableConstructorFlag _) : base(_) { mlpEnsemble = new alglib.mlpensemble(); }