/// <summary>
/// Storable constructor for deserialization. When the deserializing flag is set,
/// an empty ALGLIB multilayer perceptron is allocated as the target that the
/// persistence layer restores state into.
/// </summary>
private NeuralNetworkModel(bool deserializing)
    : base(deserializing)
{
    if (!deserializing)
    {
        return;
    }
    multiLayerPerceptron = new alglib.multilayerperceptron();
}
 /// <summary>
 /// Creates a model wrapping an already-trained ALGLIB multilayer perceptron.
 /// </summary>
 /// <param name="multiLayerPerceptron">Trained network the model wraps (stored by reference).</param>
 /// <param name="targetVariable">Name of the predicted variable, forwarded to the base class.</param>
 /// <param name="allowedInputVariables">Input variable names; materialized into an array.</param>
 /// <param name="classValues">Optional class values (classification use); defensively cloned when supplied.</param>
 public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
   : base(targetVariable) {
   name = ItemName;
   description = ItemDescription;
   this.multiLayerPerceptron = multiLayerPerceptron;
   this.allowedInputVariables = allowedInputVariables.ToArray();
   // Clone so later mutation of the caller's array cannot affect this model.
   this.classValues = classValues == null ? null : (double[])classValues.Clone();
 }
Example #3
0
 /// <summary>
 /// Builds a model around a trained ALGLIB multilayer perceptron.
 /// </summary>
 /// <param name="multiLayerPerceptron">The trained network; kept by reference, not copied.</param>
 /// <param name="targetVariable">Target variable name passed on to the base constructor.</param>
 /// <param name="allowedInputVariables">Names of the permitted input variables.</param>
 /// <param name="classValues">Class values for classification models; cloned defensively if present.</param>
 public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable <string> allowedInputVariables, double[] classValues = null)
     : base(targetVariable)
 {
     this.name = ItemName;
     this.description = ItemDescription;
     this.multiLayerPerceptron = multiLayerPerceptron;
     this.allowedInputVariables = allowedInputVariables.ToArray();
     if (classValues == null)
     {
         return;  // regression use: no class values to remember
     }
     this.classValues = (double[])classValues.Clone();
 }
        /// <summary>
        /// Creates a neural network with a single hidden layer, either with output
        /// bounds or without them depending on the edge flag.
        /// </summary>
        /// <param name="bisLogObj">Object name (becomes part of the network name)</param>
        /// <param name="input">Number of input neurons</param>
        /// <param name="edge">Bounds flag: true builds a bounded-output net, false builds a 2-class classifier net</param>
        /// <param name="lowerEdge">Lower output bound (used only when edge is true)</param>
        /// <param name="upperEdge">Upper output bound (used only when edge is true)</param>
        private void Create(string bisLogObj, int input, bool edge, double lowerEdge, double upperEdge)
        {
            // Hidden layer is sized 20% larger than the input layer, truncated to int.
            int layer = (int)(input * 1.2);

            // Network name encodes object, topology and bounds; note the edge flag itself is not encoded.
            name = String.Format("{0}.{1}.{2}.{3}.{4}", bisLogObj, input, layer, lowerEdge, upperEdge);
            // NOTE(review): this allocation is redundant — both mlpcreater1 and
            // mlpcreatec1 below replace 'net' via their out parameter.
            net  = new alglib.multilayerperceptron();
            if (edge)
            {
                // R-type net: single output constrained to [lowerEdge, upperEdge] (ALGLIB mlpcreater1).
                alglib.mlpcreater1(input, layer, 1, lowerEdge, upperEdge, out net);
            }
            else
            {
                // C-type net with 2 output classes; the bound arguments are ignored on this path.
                alglib.mlpcreatec1(input, layer, 2, out net);
            }
        }
        /// <summary>
        /// Trains a neural network regressor on the problem's training partition and
        /// wraps the result in a regression solution.
        /// </summary>
        /// <param name="problemData">Regression problem data providing dataset, target variable and training rows.</param>
        /// <param name="nLayers">Number of hidden layers; must be 0, 1, or 2.</param>
        /// <param name="nHiddenNodes1">Nodes in the first hidden layer (used when nLayers >= 1).</param>
        /// <param name="nHiddenNodes2">Nodes in the second hidden layer (used when nLayers == 2).</param>
        /// <param name="decay">Weight decay coefficient for Levenberg-Marquardt training.</param>
        /// <param name="restarts">Number of training restarts from random initial weights.</param>
        /// <param name="rmsError">Out: root mean squared error on the training data.</param>
        /// <param name="avgRelError">Out: average relative error on the training data.</param>
        /// <returns>A regression solution wrapping the trained network and a clone of the problem data.</returns>
        /// <exception cref="NotSupportedException">The input data contains NaN or infinity.</exception>
        /// <exception cref="ArgumentException">Invalid layer count, or ALGLIB reports a training failure.</exception>
        public static IRegressionSolution CreateNeuralNetworkRegressionSolution(IRegressionProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
                                                                                out double rmsError, out double avgRelError)
        {
            var    dataset        = problemData.Dataset;
            string targetVariable = problemData.TargetVariable;
            // Materialize once: the enumerable is otherwise enumerated repeatedly
            // (Count() for network sizing, Concat for the matrix, and the model ctor).
            string[] allowedInputVariables = problemData.AllowedInputVariables.ToArray();
            IEnumerable <int> rows = problemData.TrainingIndices;

            double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
            if (inputMatrix.Cast <double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
            {
                throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");
            }

            int nFeatures = allowedInputVariables.Length;
            alglib.multilayerperceptron multiLayerPerceptron = null;
            switch (nLayers)
            {
                case 0:
                    alglib.mlpcreate0(nFeatures, 1, out multiLayerPerceptron);
                    break;

                case 1:
                    alglib.mlpcreate1(nFeatures, nHiddenNodes1, 1, out multiLayerPerceptron);
                    break;

                case 2:
                    alglib.mlpcreate2(nFeatures, nHiddenNodes1, nHiddenNodes2, 1, out multiLayerPerceptron);
                    break;

                default:
                    throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
            }

            int nRows = inputMatrix.GetLength(0);
            int info;
            alglib.mlpreport rep;

            // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
            alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);
            if (info != 2)  // per ALGLIB, info == 2 signals successful training
            {
                throw new ArgumentException("Error in calculation of neural network regression solution");
            }

            rmsError    = alglib.mlprmserror(multiLayerPerceptron, inputMatrix, nRows);
            avgRelError = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);

            return(new NeuralNetworkRegressionSolution(new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables), (IRegressionProblemData)problemData.Clone()));
        }
 /// <summary>
 /// Deep-copy constructor used by the cloning infrastructure. Every internal
 /// ALGLIB state array is cloned individually so the copy shares no buffers
 /// with the original network.
 /// </summary>
 private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
   : base(original, cloner) {
   multiLayerPerceptron = new alglib.multilayerperceptron();
   // Aliases for the inner state objects keep the field-by-field copy readable.
   var src = original.multiLayerPerceptron.innerobj;
   var dst = multiLayerPerceptron.innerobj;
   dst.chunks = (double[,])src.chunks.Clone();
   dst.columnmeans = (double[])src.columnmeans.Clone();
   dst.columnsigmas = (double[])src.columnsigmas.Clone();
   dst.derror = (double[])src.derror.Clone();
   dst.dfdnet = (double[])src.dfdnet.Clone();
   dst.neurons = (double[])src.neurons.Clone();
   dst.nwbuf = (double[])src.nwbuf.Clone();
   dst.structinfo = (int[])src.structinfo.Clone();
   dst.weights = (double[])src.weights.Clone();
   dst.x = (double[])src.x.Clone();
   dst.y = (double[])src.y.Clone();
   allowedInputVariables = (string[])original.allowedInputVariables.Clone();
   classValues = original.classValues == null ? null : (double[])original.classValues.Clone();
 }
Example #7
0
 /// <summary>
 /// Cloning constructor: produces an independent deep copy of the wrapped
 /// ALGLIB network by cloning each of its internal state arrays.
 /// </summary>
 private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
     : base(original, cloner)
 {
     multiLayerPerceptron = new alglib.multilayerperceptron();
     // Copy array by array from the source network's inner state into the new one.
     var source = original.multiLayerPerceptron.innerobj;
     var target = multiLayerPerceptron.innerobj;
     target.chunks       = (double[, ])source.chunks.Clone();
     target.columnmeans  = (double[])source.columnmeans.Clone();
     target.columnsigmas = (double[])source.columnsigmas.Clone();
     target.derror       = (double[])source.derror.Clone();
     target.dfdnet       = (double[])source.dfdnet.Clone();
     target.neurons      = (double[])source.neurons.Clone();
     target.nwbuf        = (double[])source.nwbuf.Clone();
     target.structinfo   = (int[])source.structinfo.Clone();
     target.weights      = (double[])source.weights.Clone();
     target.x            = (double[])source.x.Clone();
     target.y            = (double[])source.y.Clone();
     allowedInputVariables = (string[])original.allowedInputVariables.Clone();
     if (original.classValues != null)
     {
         this.classValues = (double[])original.classValues.Clone();
     }
 }
Example #8
0
        /// <summary>
        /// Trains a neural network classifier on the problem's training partition and
        /// wraps the result in a classification solution.
        /// </summary>
        /// <param name="problemData">Classification problem data providing dataset, target variable and training rows.</param>
        /// <param name="nLayers">Number of hidden layers; must be 0, 1, or 2.</param>
        /// <param name="nHiddenNodes1">Nodes in the first hidden layer (used when nLayers >= 1).</param>
        /// <param name="nHiddenNodes2">Nodes in the second hidden layer (used when nLayers == 2).</param>
        /// <param name="decay">Weight decay coefficient for Levenberg-Marquardt training.</param>
        /// <param name="restarts">Number of training restarts from random initial weights.</param>
        /// <param name="rmsError">Out: root mean squared error on the training data.</param>
        /// <param name="avgRelError">Out: average relative error on the training data.</param>
        /// <param name="relClassError">Out: fraction of misclassified training rows.</param>
        /// <returns>A classification solution wrapping the trained network and a clone of the problem data.</returns>
        /// <exception cref="NotSupportedException">The input data contains NaN or infinity.</exception>
        /// <exception cref="ArgumentException">Invalid layer count, or ALGLIB reports a training failure.</exception>
        public static IClassificationSolution CreateNeuralNetworkClassificationSolution(IClassificationProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
                                                                                        out double rmsError, out double avgRelError, out double relClassError)
        {
            var    dataset        = problemData.Dataset;
            string targetVariable = problemData.TargetVariable;
            // Materialize once: the enumerable is otherwise enumerated repeatedly
            // (Count() for network sizing, Concat for the matrix, and the model ctor).
            string[] allowedInputVariables = problemData.AllowedInputVariables.ToArray();
            IEnumerable <int> rows = problemData.TrainingIndices;

            double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
            if (inputMatrix.Cast <double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
            {
                throw new NotSupportedException("Neural network classification does not support NaN or infinity values in the input dataset.");
            }

            int nRows     = inputMatrix.GetLength(0);
            int nFeatures = inputMatrix.GetLength(1) - 1;

            // Distinct, sorted target values define the label set (.Length instead of LINQ Count() on an array).
            double[] classValues = dataset.GetDoubleValues(targetVariable).Distinct().OrderBy(x => x).ToArray();
            int      nClasses    = classValues.Length;
            // map original class values to values [0..nClasses-1]
            Dictionary <double, double> classIndices = new Dictionary <double, double>();
            for (int i = 0; i < nClasses; i++)
            {
                classIndices[classValues[i]] = i;
            }
            // Rewrite the target column in place with the mapped class indices.
            for (int row = 0; row < nRows; row++)
            {
                inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
            }

            alglib.multilayerperceptron multiLayerPerceptron = null;
            switch (nLayers)
            {
                case 0:
                    alglib.mlpcreatec0(allowedInputVariables.Length, nClasses, out multiLayerPerceptron);
                    break;

                case 1:
                    alglib.mlpcreatec1(allowedInputVariables.Length, nHiddenNodes1, nClasses, out multiLayerPerceptron);
                    break;

                case 2:
                    alglib.mlpcreatec2(allowedInputVariables.Length, nHiddenNodes1, nHiddenNodes2, nClasses, out multiLayerPerceptron);
                    break;

                default:
                    throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
            }

            int info;
            alglib.mlpreport rep;

            // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
            alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);
            if (info != 2)  // per ALGLIB, info == 2 signals successful training
            {
                throw new ArgumentException("Error in calculation of neural network classification solution");
            }

            rmsError      = alglib.mlprmserror(multiLayerPerceptron, inputMatrix, nRows);
            avgRelError   = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);
            relClassError = alglib.mlpclserror(multiLayerPerceptron, inputMatrix, nRows) / (double)nRows;

            var problemDataClone = (IClassificationProblemData)problemData.Clone();

            return(new NeuralNetworkClassificationSolution(new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()), problemDataClone));
        }
 /// <summary>
 /// Storable constructor: on the deserialization path an empty ALGLIB network
 /// is pre-allocated for the persistence layer to populate.
 /// </summary>
 private NeuralNetworkModel(bool deserializing)
   : base(deserializing) {
   // Non-deserializing construction paths assign multiLayerPerceptron themselves.
   if (deserializing) {
     multiLayerPerceptron = new alglib.multilayerperceptron();
   }
 }
Example #10
0
 /// <summary>
 /// Storable constructor (StorableConstructorFlag overload): pre-allocates an
 /// empty ALGLIB network so deserialization has an instance to restore state into.
 /// </summary>
 private NeuralNetworkModel(StorableConstructorFlag _) : base(_)
 {
     multiLayerPerceptron = new alglib.multilayerperceptron();
 }