Example #1
        /// <summary>
        /// Create a SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                          "SVM Train training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            double defaultGamma = 1.0d / ((SupportVectorMachine)method).InputCount;
            double defaultC     = 1.0d;

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder = new ParamsHolder(args);
            double gamma  = holder.GetDouble(MLTrainFactory.PropertyGamma,
                                             false, defaultGamma);
            double c = holder.GetDouble(MLTrainFactory.PropertyC, false,
                                        defaultC);

            var result = new SVMTrain((SupportVectorMachine)method, training);

            result.Gamma = gamma;
            result.C     = c;
            return(result);
        }
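The factories on this page all receive their tunables as a single argument string. Below is a minimal sketch of that parsing step, using the same ArchitectureParse.ParseParams and ParamsHolder.GetDouble calls as the example above; it assumes the usual name=value,name=value syntax, an Encog reference, and the literal keys "GAMMA" and "C" that Example #2 shows behind MLTrainFactory.PropertyGamma and PropertyC. The namespaces in the using directives are assumptions and should be checked against the Encog version in use.

 using System;
 using System.Collections.Generic;
 using Encog.ML.Factory.Parse; // ArchitectureParse (assumed location)
 using Encog.Util;             // ParamsHolder (assumed location)

 internal static class SvmArgsSketch
 {
     private static void Main()
     {
         // Illustrative argument string for the SVM trainer factory above.
         IDictionary<string, string> args = ArchitectureParse.ParseParams("GAMMA=0.5,C=2");
         var holder = new ParamsHolder(args);

         // "false" marks the key as optional, so the supplied default is used when it is missing.
         double gamma = holder.GetDouble("GAMMA", false, 1.0);
         double c = holder.GetDouble("C", false, 1.0);

         Console.WriteLine("gamma=" + gamma + ", C=" + c);
     }
 }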
Example #2
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is SupportVectorMachine))
     {
         throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
     }

     double defaultGamma = 1.0 / ((SupportVectorMachine) method).InputCount;
     double defaultC = 1.0;

     IDictionary<string, string> theParams = ArchitectureParse.ParseParams(argsStr);
     ParamsHolder holder = new ParamsHolder(theParams);
     double gamma = holder.GetDouble("GAMMA", false, defaultGamma);
     double c = holder.GetDouble("C", false, defaultC);

     SVMTrain train = new SVMTrain((SupportVectorMachine) method, training) {
         Gamma = gamma,
         C = c
     };
     return train;
 }
Example #3
        /// <summary>
        /// Create a SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                    "SVM Train training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            double defaultGamma = 1.0d/((SupportVectorMachine) method).InputCount;
            double defaultC = 1.0d;

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double gamma = holder.GetDouble(MLTrainFactory.PropertyGamma,
                                            false, defaultGamma);
            double c = holder.GetDouble(MLTrainFactory.PropertyC, false,
                                        defaultC);

            var result = new SVMTrain((SupportVectorMachine) method, training);
            result.Gamma = gamma;
            result.C = c;
            return result;
        }
Example #4
 public static int NetworkSize(IMLMethod network)
 {
     if (!(network is IMLEncodable))
     {
         throw new NeuralNetworkError("This machine learning method cannot be encoded:" + network.GetType().FullName);
     }
     return ((IMLEncodable) network).EncodedArrayLength();
 }
Example #5
 public static void ArrayToNetwork(double[] array, IMLMethod network)
 {
     if (!(network is IMLEncodable))
     {
         throw new NeuralNetworkError("This machine learning method cannot be encoded:" + network.GetType().FullName);
     }
     ((IMLEncodable) network).DecodeFromArray(array);
 }
Example #6
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is SOMNetwork))
     {
         throw new EncogError("Cluster SOM training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new SOMClusterCopyTraining((SOMNetwork) method, training);
 }
Example #7
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string args)
 {
     if (!(method is BasicPNN))
     {
         throw new EncogError("PNN training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new TrainBasicPNN((BasicPNN) method, training);
 }
Example #8
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string args)
 {
     if (!(method is BasicNetwork))
     {
         throw new EncogError("SCG training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new ScaledConjugateGradient((BasicNetwork) method, training);
 }
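The SCG factory above only wires a trainer to an existing network. For orientation, here is a minimal end-to-end sketch in the style of the standard Encog XOR example: build a BasicNetwork, wrap the XOR truth table in a BasicMLDataSet, and drive the same ScaledConjugateGradient constructor the factory calls. The namespaces and the BasicLayer/FinalizeStructure setup are quoted from memory of the Encog API and are assumptions to verify, not part of the factory code above.

 using Encog.Engine.Network.Activation;
 using Encog.ML.Data.Basic;
 using Encog.Neural.Networks;
 using Encog.Neural.Networks.Layers;
 using Encog.Neural.Networks.Training.Propagation.SCG;

 // 2-3-1 feed-forward network.
 var network = new BasicNetwork();
 network.AddLayer(new BasicLayer(null, true, 2));
 network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
 network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
 network.Structure.FinalizeStructure();
 network.Reset();

 // XOR truth table as the training data.
 double[][] input = { new[] { 0.0, 0.0 }, new[] { 1.0, 0.0 }, new[] { 0.0, 1.0 }, new[] { 1.0, 1.0 } };
 double[][] ideal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };
 var trainingSet = new BasicMLDataSet(input, ideal);

 // Same constructor the factory uses: ScaledConjugateGradient(network, training).
 var train = new ScaledConjugateGradient(network, trainingSet);
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);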
Example #9
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string args)
 {
     if (!(method is RBFNetwork))
     {
         throw new EncogError("RBF-SVD training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new SVDTraining((RBFNetwork) method, training);
 }
Example #10
 /// <summary>
 /// Determine the network size.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <returns>The size.</returns>
 public static int NetworkSize(IMLMethod network)
 {
     if (network is IMLEncodable)
     {
         return(((IMLEncodable)network).EncodedArrayLength());
     }
     throw new NeuralNetworkError(Error
                                  + network.GetType().FullName);
 }
Example #11
 /// <summary>
 /// Use an array to populate the memory of the neural network.
 /// </summary>
 ///
 /// <param name="array">An array of doubles.</param>
 /// <param name="network">The network to encode.</param>
 public static void ArrayToNetwork(double[] array,
                                   IMLMethod network)
 {
     if (network is IMLEncodable)
     {
         ((IMLEncodable)network).DecodeFromArray(array);
         return;
     }
     throw new NeuralNetworkError(Error
                                  + network.GetType().FullName);
 }
Example #12
 /// <summary>
 /// Use an array to populate the memory of the neural network.
 /// </summary>
 ///
 /// <param name="array">An array of doubles.</param>
 /// <param name="network">The network to encode.</param>
 public static void ArrayToNetwork(double[] array,
     IMLMethod network)
 {
     if (network is IMLEncodable)
     {
         ((IMLEncodable) network).DecodeFromArray(array);
         return;
     }
     throw new NeuralNetworkError(Error
                                  + network.GetType().FullName);
 }
Example #13
        /// <summary>
        /// Create a cluster SOM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SOMNetwork))
            {
                throw new EncogError(
                          "Cluster SOM training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            return(new SOMClusterCopyTraining((SOMNetwork)method, training));
        }
Example #14
        /// <summary>
        /// Create a RBF-SVD trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="args">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String args)
        {
            if (!(method is RBFNetwork))
            {
                throw new EncogError(
                          "RBF-SVD training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            return(new SVDTraining((RBFNetwork)method, training));
        }
Example #15
        /// <summary>
        /// Create a SCG trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="args">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String args)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                          "SCG training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            return(new ScaledConjugateGradient((BasicNetwork)method, training));
        }
Example #16
        /// <summary>
        /// Create a PNN trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="args">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String args)
        {
            if (!(method is BasicPNN))
            {
                throw new EncogError(
                          "PNN training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            return(new TrainBasicPNN((BasicPNN)method, training));
        }
Example #17
        /// <summary>
        /// Convert to an array. This is used with some training algorithms that
        /// require that the "memory" of the neuron (the weight and bias values) be
        /// expressed as a linear array.
        /// </summary>
        ///
        /// <param name="network">The network to encode.</param>
        /// <returns>The memory of the neuron.</returns>
        public static double[] NetworkToArray(IMLMethod network)
        {
            int size = NetworkSize(network);

            if (network is IMLEncodable)
            {
                var encoded = new double[size];
                ((IMLEncodable)network).EncodeToArray(encoded);
                return(encoded);
            }
            throw new NeuralNetworkError(Error
                                         + network.GetType().FullName);
        }
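Taken together, NetworkSize, NetworkToArray and ArrayToNetwork give a round trip between a network and a flat weight vector, which is what the trainers that need a "linear array" of memory operate on. A short sketch of that round trip follows; it assumes these statics live on Encog's NetworkCODEC helper (their usual home) and that network is an already finalized BasicNetwork.

 // Flatten the weights and biases, perturb them, and write them back.
 double[] weights = NetworkCODEC.NetworkToArray(network); // length equals NetworkCODEC.NetworkSize(network)
 for (int i = 0; i < weights.Length; i++)
 {
     weights[i] += 0.001; // e.g. one mutation step
 }
 NetworkCODEC.ArrayToNetwork(weights, network);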
Example #18
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is IContainsFlat))
     {
         throw new EncogError("RPROP training cannot be used on a method of type: " + method.GetType().FullName);
     }
     ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
     double initialUpdate = holder.GetDouble("INIT_UPDATE", false, 0.1);
     double maxStep = holder.GetDouble("MAX_STEP", false, 50.0);
     return new ResilientPropagation((IContainsFlat) method, training, initialUpdate, maxStep);
 }
Example #19
        /// <summary>
        /// Create a LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "LMA training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork) method, training);
            return result;
        }
Example #20
        /// <summary>
        /// Create a LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
            IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "LMA training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork) method, training);
            return result;
        }
Example #21
        /// <summary>
        /// Create a LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "LMA training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            bool useReg = holder.GetBoolean(
                MLTrainFactory.PropertyBayesianRegularization, false, false);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork) method, training) {UseBayesianRegularization = useReg};
            return result;
        }
Example #22
        /// <summary>
        /// Create a SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                          "SVM Train training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double gammaStart = holder.GetDouble(
                PropertyGamma1, false,
                SVMSearchTrain.DefaultGammaBegin);
            double cStart = holder.GetDouble(PropertyC1,
                                             false, SVMSearchTrain.DefaultConstBegin);
            double gammaStop = holder.GetDouble(
                PropertyGamma2, false,
                SVMSearchTrain.DefaultGammaEnd);
            double cStop = holder.GetDouble(PropertyC2,
                                            false, SVMSearchTrain.DefaultConstEnd);
            double gammaStep = holder.GetDouble(
                PropertyGammaStep, false,
                SVMSearchTrain.DefaultGammaStep);
            double cStep = holder.GetDouble(PropertyCStep,
                                            false, SVMSearchTrain.DefaultConstStep);

            var result = new SVMSearchTrain((SupportVectorMachine)method, training)
            {
                GammaBegin = gammaStart,
                GammaEnd   = gammaStop,
                GammaStep  = gammaStep,
                ConstBegin = cStart,
                ConstEnd   = cStop,
                ConstStep  = cStep
            };

            return(result);
        }
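Unlike the plain SVMTrain factory, SVMSearchTrain sweeps gamma and C across the configured begin/end/step ranges in search of the lowest error. Example #28 below shows the literal keys and library defaults behind these properties: GAMMA1=-10, GAMMA2=10, GAMMASTEP=1, C1=-5, C2=15, CSTEP=2. A narrower grid could therefore be requested with an argument string like the one in this sketch; the numbers are illustrative only.

 // Any key left out of the string keeps its default range value.
 const string argsStr = "GAMMA1=-4,GAMMA2=4,GAMMASTEP=0.5,C1=-2,C2=6,CSTEP=1";
 var holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
 double gammaBegin = holder.GetDouble("GAMMA1", false, SVMSearchTrain.DefaultGammaBegin); // -4 here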
Example #23
        /// <summary>
        /// Create a RPROP trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is IContainsFlat))
            {
                throw new EncogError(
                    "RPROP training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double initialUpdate = holder.GetDouble(
                MLTrainFactory.PropertyInitialUpdate, false,
                RPROPConst.DefaultInitialUpdate);
            double maxStep = holder.GetDouble(
                MLTrainFactory.PropertyMaxStep, false,
                RPROPConst.DefaultMaxStep);

            return new ResilientPropagation((IContainsFlat) method, training,
                                            initialUpdate, maxStep);
        }
Example #24
        /// <summary>
        /// Create a RPROP trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is IContainsFlat))
            {
                throw new EncogError(
                          "RPROP training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder        = new ParamsHolder(args);
            double initialUpdate = holder.GetDouble(
                MLTrainFactory.PropertyInitialUpdate, false,
                RPROPConst.DefaultInitialUpdate);
            double maxStep = holder.GetDouble(
                MLTrainFactory.PropertyMaxStep, false,
                RPROPConst.DefaultMaxStep);

            return(new ResilientPropagation((IContainsFlat)method, training,
                                            initialUpdate, maxStep));
        }
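Example #18 above shows the literal keys behind MLTrainFactory.PropertyInitialUpdate and PropertyMaxStep: "INIT_UPDATE" (default 0.1) and "MAX_STEP" (default 50). The trainer can also be constructed directly with the same two tunables, as in this sketch; the network and trainingSet objects and the values are illustrative, and a BasicNetwork satisfies the IContainsFlat check.

 // Equivalent to Create(network, trainingSet, "INIT_UPDATE=0.1,MAX_STEP=50").
 var rprop = new ResilientPropagation(network, trainingSet, 0.1, 50.0);
 rprop.Iteration();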
Example #25
        /// <summary>
        /// Create a LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                          "LMA training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var  holder = new ParamsHolder(args);
            bool useReg = holder.GetBoolean(
                MLTrainFactory.PropertyBayesianRegularization, false, false);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork)method, training)
            {
                UseBayesianRegularization = useReg
            };

            return(result);
        }
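Example #26 below shows that MLTrainFactory.PropertyBayesianRegularization resolves to the literal key "BAYES_REG", so the flag can be read straight from an argument string as in this sketch; the string value and its boolean spelling are illustrative, check ParamsHolder.GetBoolean for the accepted forms.

 // The same lookup the factory performs above.
 var holder = new ParamsHolder(ArchitectureParse.ParseParams("BAYES_REG=true"));
 bool useReg = holder.GetBoolean("BAYES_REG", false, false);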
Example #26
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is BasicNetwork))
     {
         throw new EncogError("LMA training cannot be used on a method of type: " + method.GetType().FullName);
     }
     bool useBayesianRegularization = new ParamsHolder(ArchitectureParse.ParseParams(argsStr)).GetBoolean("BAYES_REG", false, false);
     LevenbergMarquardtTraining train = new LevenbergMarquardtTraining((BasicNetwork) method, training);
     train.UseBayesianRegularization = useBayesianRegularization;
     return train;
 }
Example #27
        /// <summary>
        /// Create a SOM neighborhood trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SOMNetwork))
            {
                throw new EncogError(
                    "Neighborhood training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 0.7d);
            String neighborhoodStr = holder.GetString(
                MLTrainFactory.PropertyNeighborhood, false, "rbf");
            String rbfTypeStr = holder.GetString(
                MLTrainFactory.PropertyRBFType, false, "gaussian");

            RBFEnum t;

            if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Gaussian;
            }
            else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Multiquadric;
            }
            else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.InverseMultiquadric;
            }
            else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.MexicanHat;
            }
            else
            {
                t = RBFEnum.Gaussian;
            }

            INeighborhoodFunction nf = null;

            if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodBubble(1);
            }
            else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
            {
                String str = holder.GetString(
                    MLTrainFactory.PropertyDimensions, true, null);
                int[] size = NumberList.FromListInt(CSVFormat.EgFormat, str);
                nf = new NeighborhoodRBF(size, t);
            }
            else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodRBF1D(t);
            }
            if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodSingle();
            }

            var result = new BasicTrainSOM((SOMNetwork) method,
                                           learningRate, training, nf);

            if (args.ContainsKey(MLTrainFactory.PropertyIterations))
            {
                int plannedIterations = holder.GetInt(
                    MLTrainFactory.PropertyIterations, false, 1000);
                double startRate = holder.GetDouble(
                    MLTrainFactory.PropertyStartLearningRate, false, 0.05d);
                double endRate = holder.GetDouble(
                    MLTrainFactory.PropertyEndLearningRate, false, 0.05d);
                double startRadius = holder.GetDouble(
                    MLTrainFactory.PropertyStartRadius, false, 10);
                double endRadius = holder.GetDouble(
                    MLTrainFactory.PropertyEndRadius, false, 1);
                result.SetAutoDecay(plannedIterations, startRate, endRate,
                                    startRadius, endRadius);
            }

            return result;
        }
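Example #29 below reveals the literal keys behind the constants used here: LR, NEIGHBORHOOD, RBF_TYPE, DIM, ITERATIONS, START_LR, END_LR, START_RADIUS and END_RADIUS. A complete argument string for a one-dimensional Gaussian RBF neighborhood with auto-decay enabled could therefore look like the sketch below; the numbers are illustrative, and the "rbf" variant additionally needs a DIM entry listing the map dimensions.

 // Learning rate 0.4, 1-D Gaussian RBF neighborhood, auto-decay over 500 iterations.
 const string argsStr = "LR=0.4,NEIGHBORHOOD=rbf1d,RBF_TYPE=gaussian,"
                        + "ITERATIONS=500,START_LR=0.4,END_LR=0.05,START_RADIUS=10,END_RADIUS=1";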
Example #28
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is SupportVectorMachine))
     {
         throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
     }

     IDictionary<string, string> theParams = ArchitectureParse.ParseParams(argsStr);
     ParamsHolder holder = new ParamsHolder(theParams);

     double gammaBegin = holder.GetDouble("GAMMA1", false, -10.0);
     double constBegin = holder.GetDouble("C1", false, -5.0);
     double gammaEnd = holder.GetDouble("GAMMA2", false, 10.0);
     double constEnd = holder.GetDouble("C2", false, 15.0);
     double gammaStep = holder.GetDouble("GAMMASTEP", false, 1.0);
     double constStep = holder.GetDouble("CSTEP", false, 2.0);

     SVMSearchTrain train = new SVMSearchTrain((SupportVectorMachine) method, training) {
         GammaBegin = gammaBegin,
         GammaEnd = gammaEnd,
         GammaStep = gammaStep,
         ConstBegin = constBegin,
         ConstEnd = constEnd,
         ConstStep = constStep
     };
     return train;
 }
Example #29
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     // The method is cast to SOMNetwork below, so that is the type the guard accepts.
     if (!(method is SOMNetwork))
     {
         throw new EncogError("Neighborhood training cannot be used on a method of type: " + method.GetType().FullName);
     }

     IDictionary<string, string> theParams = ArchitectureParse.ParseParams(argsStr);
     ParamsHolder holder = new ParamsHolder(theParams);

     double learningRate = holder.GetDouble("LR", false, 0.7);
     string neighborhoodStr = holder.GetString("NEIGHBORHOOD", false, "rbf");
     string rbfTypeStr = holder.GetString("RBF_TYPE", false, "gaussian");

     // Unrecognised RBF names fall back to Gaussian.
     RBFEnum rbfType;
     if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.Gaussian;
     }
     else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.Multiquadric;
     }
     else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.InverseMultiquadric;
     }
     else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.MexicanHat;
     }
     else
     {
         rbfType = RBFEnum.Gaussian;
     }

     INeighborhoodFunction neighborhood = null;
     if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
     {
         neighborhood = new NeighborhoodBubble(1);
     }
     else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
     {
         string dimStr = holder.GetString("DIM", true, null);
         int[] size = NumberList.FromListInt(CSVFormat.EgFormat, dimStr);
         neighborhood = new NeighborhoodRBF(size, rbfType);
     }
     else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
     {
         neighborhood = new NeighborhoodRBF1D(rbfType);
     }
     else if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
     {
         neighborhood = new NeighborhoodSingle();
     }

     BasicTrainSOM som = new BasicTrainSOM((SOMNetwork) method, learningRate, training, neighborhood);

     // An ITERATIONS entry switches on automatic decay of the learning rate and radius.
     if (theParams.ContainsKey("ITERATIONS"))
     {
         int plannedIterations = holder.GetInt("ITERATIONS", false, 1000);
         double startRate = holder.GetDouble("START_LR", false, 0.05);
         double endRate = holder.GetDouble("END_LR", false, 0.05);
         double startRadius = holder.GetDouble("START_RADIUS", false, 10.0);
         double endRadius = holder.GetDouble("END_RADIUS", false, 1.0);
         som.SetAutoDecay(plannedIterations, startRate, endRate, startRadius, endRadius);
     }

     return som;
 }
Example #30
 public static double[] NetworkToArray(IMLMethod network)
 {
     int num = NetworkSize(network);
     if (!(network is IMLEncodable))
     {
         throw new NeuralNetworkError("This machine learning method cannot be encoded:" + network.GetType().FullName);
     }
     double[] encoded = new double[num];
     ((IMLEncodable) network).EncodeToArray(encoded);
     return encoded;
 }
Example #31
        /// <summary>
        /// Convert to an array. This is used with some training algorithms that
        /// require that the "memory" of the neuron (the weight and bias values) be
        /// expressed as a linear array.
        /// </summary>
        ///
        /// <param name="network">The network to encode.</param>
        /// <returns>The memory of the neuron.</returns>
        public static double[] NetworkToArray(IMLMethod network)
        {
            int size = NetworkSize(network);

            if (network is IMLEncodable)
            {
                var encoded = new double[size];
                ((IMLEncodable) network).EncodeToArray(encoded);
                return encoded;
            }
            throw new NeuralNetworkError(Error
                                         + network.GetType().FullName);
        }
Example #32
 /// <summary>
 /// Determine the network size.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <returns>The size.</returns>
 public static int NetworkSize(IMLMethod network)
 {
     if (network is IMLEncodable)
     {
         return ((IMLEncodable) network).EncodedArrayLength();
     }
     throw new NeuralNetworkError(Error
                                  + network.GetType().FullName);
 }
Example #33
        /// <summary>
        /// Create a SOM neighborhood trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SOMNetwork))
            {
                throw new EncogError(
                          "Neighborhood training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 0.7d);
            String neighborhoodStr = holder.GetString(
                MLTrainFactory.PropertyNeighborhood, false, "rbf");
            String rbfTypeStr = holder.GetString(
                MLTrainFactory.PropertyRBFType, false, "gaussian");

            RBFEnum t;

            if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Gaussian;
            }
            else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Multiquadric;
            }
            else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.InverseMultiquadric;
            }
            else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.MexicanHat;
            }
            else
            {
                t = RBFEnum.Gaussian;
            }

            INeighborhoodFunction nf = null;

            if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodBubble(1);
            }
            else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
            {
                String str = holder.GetString(
                    MLTrainFactory.PropertyDimensions, true, null);
                int[] size = NumberList.FromListInt(CSVFormat.EgFormat, str);
                nf = new NeighborhoodRBF(size, t);
            }
            else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodRBF1D(t);
            }
            if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodSingle();
            }

            var result = new BasicTrainSOM((SOMNetwork)method,
                                           learningRate, training, nf);

            if (args.ContainsKey(MLTrainFactory.PropertyIterations))
            {
                int plannedIterations = holder.GetInt(
                    MLTrainFactory.PropertyIterations, false, 1000);
                double startRate = holder.GetDouble(
                    MLTrainFactory.PropertyStartLearningRate, false, 0.05d);
                double endRate = holder.GetDouble(
                    MLTrainFactory.PropertyEndLearningRate, false, 0.05d);
                double startRadius = holder.GetDouble(
                    MLTrainFactory.PropertyStartRadius, false, 10);
                double endRadius = holder.GetDouble(
                    MLTrainFactory.PropertyEndRadius, false, 1);
                result.SetAutoDecay(plannedIterations, startRate, endRate,
                                    startRadius, endRadius);
            }

            return(result);
        }
Example #34
        /// <summary>
        /// Create a SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                    "SVM Train training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double gammaStart = holder.GetDouble(
                PropertyGamma1, false,
                SVMSearchTrain.DefaultGammaBegin);
            double cStart = holder.GetDouble(PropertyC1,
                                             false, SVMSearchTrain.DefaultConstBegin);
            double gammaStop = holder.GetDouble(
                PropertyGamma2, false,
                SVMSearchTrain.DefaultGammaEnd);
            double cStop = holder.GetDouble(PropertyC2,
                                            false, SVMSearchTrain.DefaultConstEnd);
            double gammaStep = holder.GetDouble(
                PropertyGammaStep, false,
                SVMSearchTrain.DefaultGammaStep);
            double cStep = holder.GetDouble(PropertyCStep,
                                            false, SVMSearchTrain.DefaultConstStep);

            var result = new SVMSearchTrain((SupportVectorMachine) method, training)
                             {
                                 GammaBegin = gammaStart,
                                 GammaEnd = gammaStop,
                                 GammaStep = gammaStep,
                                 ConstBegin = cStart,
                                 ConstEnd = cStop,
                                 ConstStep = cStep
                             };

            return result;
        }