/// <summary>
/// Constructs a trainer for a self-organizing map and validates the training set.
/// </summary>
/// <param name="network">The SOM to train.</param>
/// <param name="trainingSet">Training vectors, one per row.</param>
/// <param name="learningMethod">Additive or subtractive weight-update rule.</param>
/// <param name="learningRate">Initial learning rate.</param>
/// <exception cref="Exception">Thrown when a training vector has (near-)zero length,
/// which multiplicative normalization cannot handle.</exception>
public SelfOrganizingMapTrainer(SelfOrganizingMap network, double[][] trainingSet, LearningMethod learningMethod, double learningRate)
{
    ReductionFactor = 0.99;
    TotalError = 1.0; // fix: was redundantly assigned twice in the original
    Network = network;
    TrainingSet = trainingSet;
    LearningMethod = learningMethod;
    LearningRate = learningRate;
    _outputNeuronCount = network.OutputNeuronCount;
    _inputNeuronCount = network.InputNeuronCount;

    // Reject zero-length vectors up front; they would break multiplicative normalization.
    for (int i = 0; i < trainingSet.Length; i++)
    {
        if (Matrix.CreateColumnMatrix(trainingSet[i]).VectorLength() < MinValue)
        {
            throw new Exception("Multiplicative normalization has null training case");
        }
    }

    BestNetwork = new SelfOrganizingMap(_inputNeuronCount, _outputNeuronCount, network.NormalizationType);
    _neuronWinCounts = new int[_outputNeuronCount];
    // One extra column holds the synthetic bias input.
    _correctionMatrix = new Matrix(_outputNeuronCount, _inputNeuronCount + 1);
    // The scratch matrix is only needed by the additive learning rule.
    _workMatrix = LearningMethod == LearningMethod.Additive ? new Matrix(1, _inputNeuronCount + 1) : null;
    Initialize();
    BestError = Double.MaxValue;
}
/// <summary>
/// Constructs a trainer for a self-organizing map and validates the training set.
/// </summary>
/// <param name="network">The SOM to train.</param>
/// <param name="trainingSet">Training vectors, one per row.</param>
/// <param name="learningMethod">Additive or subtractive weight-update rule.</param>
/// <param name="learningRate">Initial learning rate.</param>
/// <exception cref="Exception">Thrown when a training vector has (near-)zero length,
/// which multiplicative normalization cannot handle.</exception>
public SelfOrganizingMapTrainer(SelfOrganizingMap network, double[][] trainingSet, LearningMethod learningMethod, double learningRate)
{
    ReductionFactor = 0.99;
    TotalError = 1.0; // fix: was redundantly assigned twice in the original
    Network = network;
    TrainingSet = trainingSet;
    LearningMethod = learningMethod;
    LearningRate = learningRate;
    outputNeuronCount = network.OutputNeuronCount;
    inputNeuronCount = network.InputNeuronCount;

    // Reject zero-length vectors up front; they would break multiplicative normalization.
    for (int i = 0; i < trainingSet.Length; i++)
    {
        if (Matrix.CreateColumnMatrix(trainingSet[i]).VectorLength() < MinValue)
        {
            throw new Exception("Multiplicative normalization has null training case");
        }
    }

    BestNetwork = new SelfOrganizingMap(inputNeuronCount, outputNeuronCount, network.NormalizationType);
    neuronWinCounts = new int[outputNeuronCount];
    // One extra column holds the synthetic bias input.
    correctionMatrix = new Matrix(outputNeuronCount, inputNeuronCount + 1);
    // The scratch matrix is only needed by the additive learning rule.
    workMatrix = LearningMethod == LearningMethod.Additive ? new Matrix(1, inputNeuronCount + 1) : null;
    Initialize();
    BestError = Double.MaxValue;
}
/// <summary>
/// Resolves the network, data, learning method and training mode from the
/// evaluation context, runs the configured training, and returns the
/// (now trained) network object.
/// </summary>
/// <param name="owner">Owner object passed through to each evaluation.</param>
/// <param name="globalVars">Context lookup for variable resolution.</param>
/// <returns>The trained network instance.</returns>
public object Train(object owner, IContextLookup globalVars)
{
    object networkToTrain = Network.EvaluateTyped(owner, globalVars);
    IEnumerable trainingData = Data.EvaluateTyped(owner, globalVars);

    // The learning-method info acts as a factory bound to this network/data pair.
    ILearningMethodInfo methodInfo = LearningMethod.EvaluateTyped(owner, globalVars);
    ILearningMethod resolvedMethod = methodInfo.GetLearningMethod(owner, globalVars, networkToTrain, trainingData);

    IEnumerable validationData = ValidationData.EvaluateTyped(owner, globalVars);
    ITrainingInfo trainingMode = TrainMode.EvaluateTyped(owner, globalVars);
    trainingMode.PerformTraining(resolvedMethod, networkToTrain, validationData);

    return networkToTrain;
}
/// <summary>
/// Applies one supervised learning step (perceptron or Widrow-Hoff rule) to
/// every neuron of the network's single layer.
/// </summary>
/// <param name="teachingElement">Input vector plus expected outputs.</param>
/// <param name="ratio">Learning-rate coefficient.</param>
/// <param name="previousResponse">Per-neuron responses from this step (allocated on first use).</param>
/// <param name="previousError">Per-neuron errors from this step (allocated on first use).</param>
/// <param name="method">Which learning rule to apply.</param>
/// <exception cref="InvalidOperationException">Thrown when the network has more than one layer.</exception>
public void LearnSimple(TeachingSet.Element teachingElement, double ratio, ref double[] previousResponse, ref double[] previousError, LearningMethod method)
{
    if (_layers.Count != 1)
    {
        throw new InvalidOperationException(
            "The simple learning algorithm can be applied only to one-layer networks.");
    }

    NonLinearNeuron[] outputLayer = _layers[0];

    // Lazily allocate the caller-visible scratch arrays on first use.
    previousResponse = previousResponse ?? new double[outputLayer.Length];
    previousError = previousError ?? new double[outputLayer.Length];

    double[] biasedInputs = AppendBias(teachingElement.Inputs, false);

    for (int i = 0; i < outputLayer.Length; i++)
    {
        if (method == LearningMethod.Perceptron)
        {
            outputLayer[i].Learn(
                biasedInputs,
                teachingElement.ExpectedOutputs[i],
                ratio,
                out previousResponse[i],
                out previousError[i]);
        }
        else if (method == LearningMethod.WidrowHoff)
        {
            outputLayer[i].LearnWidrowHoff(
                biasedInputs,
                teachingElement.ExpectedOutputs[i],
                ratio,
                out previousResponse[i],
                out previousError[i]);
        }
        // Any other method value is silently ignored, matching the original switch.
    }
}
/// <summary>
/// Constructs a neural network over the given topology.
/// </summary>
/// <param name="topology">Layer structure of the network.</param>
/// <param name="minWeight">Lower bound for connection weights.</param>
/// <param name="maxWeight">Upper bound for connection weights.</param>
/// <param name="learningMethod">Learning method used during training.</param>
/// <param name="name">Display name of this network.</param>
/// <param name="collectSparseHistoryEvery">How often (in steps) sparse history is collected.</param>
public NeuralNetwork(
    Topology topology,
    double minWeight,
    double maxWeight,
    LearningMethod learningMethod,
    String name,
    long collectSparseHistoryEvery = 100000)
{
    Topology = topology;
    this.MethodOfLearning = learningMethod;
    this.Name = name;
    NetworkError = 0;
    NetworkErrorSet = false;
    this.MinWeight = minWeight;
    // Bug fix: the original read "this.MaxWeight = MaxWeight;" — a self-assignment
    // that silently discarded the maxWeight constructor argument.
    this.MaxWeight = maxWeight;
    this.CollectSparseHistoryEvery = collectSparseHistoryEvery;
    PreviousInputsSet = false;
}
/// <summary>
/// Construct the trainer for a self organizing map.
/// </summary>
/// <param name="som">The self organizing map.</param>
/// <param name="train">The training set, one vector per row.</param>
/// <param name="learnMethod">The learning method (additive or subtractive).</param>
/// <param name="learnRate">The learning rate.</param>
/// <exception cref="System.Exception">Thrown when a training vector has (near-)zero
/// length, which multiplicative normalization cannot handle.</exception>
public TrainSelfOrganizingMap(SelfOrganizingMap som, double[][] train, LearningMethod learnMethod, double learnRate)
{
    this.som = som;
    this.train = train;
    this.totalError = 1.0; // fix: was redundantly assigned twice in the original
    this.learnMethod = learnMethod;
    this.learnRate = learnRate;
    this.outputNeuronCount = som.OutputNeuronCount;
    this.inputNeuronCount = som.InputNeuronCount;

    // Reject zero-length vectors up front; they would break multiplicative normalization.
    for (int tset = 0; tset < train.Length; tset++)
    {
        Matrix.Matrix dptr = Matrix.Matrix.CreateColumnMatrix(train[tset]);
        if (MatrixMath.vectorLength(dptr) < VERYSMALL)
        {
            throw (new System.Exception(
                "Multiplicative normalization has null training case"));
        }
    }

    this.bestnet = new SelfOrganizingMap(this.inputNeuronCount, this.outputNeuronCount, this.som.NormalizationType);
    this.won = new int[this.outputNeuronCount];
    // One extra column holds the synthetic bias input.
    this.correc = new Matrix.Matrix(this.outputNeuronCount, this.inputNeuronCount + 1);
    // The scratch matrix is only needed by the additive learning rule.
    if (this.learnMethod == LearningMethod.ADDITIVE)
    {
        this.work = new Matrix.Matrix(1, this.inputNeuronCount + 1);
    }
    else
    {
        this.work = null;
    }
    Initialize();
    this.bestError = Double.MaxValue;
}
// Trains the MADALINE network with either the Delta rule or Madaline Rule II.
// When epochAmount < 1 the loop instead runs until the objective function of
// the last layer drops below permittedError; otherwise it runs a fixed number
// of epochs. Set verbose to print the epoch count and layer state afterwards.
public void Learn(List <LinkedList <double> > dataSet, List <LinkedList <double> > expectedResult, int epochAmount, LearningMethod method, bool verbose = false)
{
    bool runUntilConverged = epochAmount < 1;
    if (runUntilConverged)
    {
        epochAmount = 0;
    }

    int epoch = 0;
    while (true)
    {
        int completedEpochs = epoch;
        epoch++;

        // Stop condition: error-driven convergence, or exhausted epoch budget.
        if (runUntilConverged)
        {
            if (completedEpochs != 0 && Layers.Last().ObjectiveFunction() < permittedError)
            {
                break;
            }
        }
        else if (completedEpochs > epochAmount)
        {
            break;
        }

        Shuffle(ref dataSet, ref expectedResult);

        if (IsDelta(method))
        {
            Delta(dataSet, expectedResult);
        }
        else if (IsMRII(method))
        {
            MRII(dataSet, expectedResult);
        }
        else
        {
            // No other learning method is implemented for MADALINE.
            break;
        }
    }

    if (verbose)
    {
        Console.WriteLine("Epochs: " + epoch.ToString());
        PrintLayers();
    }
}
// Trains the network by backpropagation, either online (weights updated after
// every sample) or offline (one update per epoch). When epochAmount < 1 the
// loop runs until the last layer's objective function drops below
// permittedError; otherwise it runs for the requested number of epochs.
// NOTE(review): "iteration++ > epochAmount" compares the pre-increment value,
// so a positive epochAmount actually yields epochAmount + 1 passes — confirm
// whether that off-by-one is intended.
public void Learn(List <LinkedList <double> > dataSet, List <LinkedList <double> > expectedResult, int epochAmount = -1, LearningMethod method = LearningMethod.BackpropagationOnline, bool verbose = false)
{
    int iteration = 0;
    bool errorCondition = false;
    // epochAmount < 1 switches to error-driven stopping.
    if (epochAmount < 1)
    {
        errorCondition = true;
        epochAmount = 0;
    }
    do
    {
        // Stop learning condition: convergence check (skipped on the very
        // first pass, before any training has happened) or epoch budget.
        if (errorCondition)
        {
            if (iteration++ != 0 && Layers.Last().ObjectiveFunction() < permittedError)
            {
                break;
            }
        }
        else
        {
            if (iteration++ > epochAmount)
            {
                break;
            }
        }
        // Reshuffle samples each epoch to avoid order bias.
        Shuffle(ref dataSet, ref expectedResult);
        // Run every sample through the network; online mode backpropagates per sample.
        var data = dataSet.Zip(expectedResult, (n, w) => new { dataSet = n, expectedResult = w });
        foreach (var row in data)
        {
            Examine(new LinkedList <double>(row.dataSet));
            if (IsOnline(method))
            {
                Backpropagation(row.expectedResult);
            }
        }
        // Offline mode backpropagates once per epoch, after all samples were examined.
        if (IsOffline(method))
        {
            Backpropagation(expectedResult.Last());
        }
    } while (true);
    if (verbose)
    {
        Console.WriteLine($"Epochs amount = {iteration}; Objective Function = {Layers.Last().ObjectiveFunction()}");
        PrintLayers();
    }
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public void moveAllInstances(org.maltparser.ml.LearningMethod method, org.maltparser.core.feature.function.FeatureFunction divideFeature, java.util.ArrayList<int> divideFeatureIndexVector) throws org.maltparser.core.exception.MaltChainedException
// Moves every training instance from this model's ".ins" file into the target
// learning method's instance writer, inserting the divide feature's index code
// as an extra column at the position given by divideFeatureIndexVector, then
// deletes the source file. Instance counts are transferred one by one.
// NOTE(review): output alternates between @out.Write (buffered writer) and
// @out.BaseStream.WriteByte (unbuffered underlying stream); unless the writer
// flushes after every Write, the separators can end up out of order — confirm.
public virtual void moveAllInstances(LearningMethod method, FeatureFunction divideFeature, List <int> divideFeatureIndexVector)
{
    if (method == null)
    {
        throw new LibException("The learning method cannot be found. ");
    }
    else if (divideFeature == null)
    {
        throw new LibException("The divide feature cannot be found. ");
    }
    try
    {
        //JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
        //ORIGINAL LINE: final java.io.BufferedReader in = new java.io.BufferedReader(getInstanceInputStreamReader(".ins"));
        StreamReader @in = new StreamReader(getInstanceInputStreamReader(".ins"));
        //JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
        //ORIGINAL LINE: final java.io.BufferedWriter out = method.getInstanceWriter();
        StreamWriter @out = method.InstanceWriter;
        //JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
        //ORIGINAL LINE: final StringBuilder sb = new StringBuilder(6);
        StringBuilder sb = new StringBuilder(6);
        int l = @in.Read();
        char c;
        int j = 0; // column index within the current instance line
        while (true)
        {
            if (l == -1)
            {
                // End of file: discard any trailing partial column.
                sb.Length = 0;
                break;
            }
            c = (char)l;
            l = @in.Read();
            if (c == '\t')
            {
                // End of a column: optionally inject the divide feature before it.
                if (divideFeatureIndexVector.Contains(j - 1))
                {
                    @out.Write(Convert.ToString(((SingleFeatureValue)divideFeature.FeatureValue).IndexCode));
                    @out.BaseStream.WriteByte('\t');
                }
                @out.Write(sb.ToString());
                j++;
                @out.BaseStream.WriteByte('\t');
                sb.Length = 0;
            }
            else if (c == '\n')
            {
                // End of an instance: flush the last column, optionally append the
                // divide feature, and transfer one instance count to the target.
                @out.Write(sb.ToString());
                if (divideFeatureIndexVector.Contains(j - 1))
                {
                    @out.BaseStream.WriteByte('\t');
                    @out.Write(Convert.ToString(((SingleFeatureValue)divideFeature.FeatureValue).IndexCode));
                }
                @out.BaseStream.WriteByte('\n');
                sb.Length = 0;
                method.increaseNumberOfInstances();
                decreaseNumberOfInstances();
                j = 0;
            }
            else
            {
                sb.Append(c);
            }
        }
        @in.Close();
        // Remove the now-empty source instance file.
        getFile(".ins").delete();
        @out.Flush();
    }
    catch (SecurityException e)
    {
        throw new LibException("The learner cannot remove the instance file. ", e);
    }
    catch (NullReferenceException e)
    {
        throw new LibException("The instance file cannot be found. ", e);
    }
    catch (FileNotFoundException e)
    {
        throw new LibException("The instance file cannot be found. ", e);
    }
    catch (IOException e)
    {
        throw new LibException("The learner read from the instance file. ", e);
    }
}
/// <summary>
/// Constructs an atomic model.
/// </summary>
/// <param name="index"> the index of the atomic model (-1..n), where -1 is special value (used by a single model
/// or the master divide model) and n is number of divide models. </param>
/// <param name="parent"> the parent guide model. </param>
/// <exception cref="MaltChainedException"> </exception>
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public AtomicModel(int index, org.maltparser.parser.guide.Model parent) throws org.maltparser.core.exception.MaltChainedException
public AtomicModel(int index, Model parent)
{
    this.parent = parent;
    this.index = index;
    // index -1 denotes the single/master model; others get a zero-padded suffix.
    if (index == -1)
    {
        modelName = parent.ModelName + ".";
    }
    else
    {
        modelName = parent.ModelName + "." + (new Formatter()).format("%03d", index) + ".";
    }
    // this.featureVector = featureVector;
    frequency = 0;
    // Map the guide's mode onto the learner's mode constant.
    int?learnerMode = null;
    if (Guide.GuideMode == ClassifierGuide_GuideMode.CLASSIFY)
    {
        learnerMode = LearningMethod_Fields.CLASSIFY;
    }
    else if (Guide.GuideMode == ClassifierGuide_GuideMode.BATCH)
    {
        learnerMode = LearningMethod_Fields.BATCH;
    }
    // start init learning method: the learner class is taken from the
    // "guide"/"learner" configuration option; LibSvm and LibLinear are
    // constructed directly, anything else via reflection.
    Type clazz = (Type)Guide.Configuration.getOptionValue("guide", "learner");
    if (clazz == typeof(LibSvm))
    {
        method = new LibSvm(this, learnerMode);
    }
    else if (clazz == typeof(LibLinear))
    {
        method = new LibLinear(this, learnerMode);
    }
    else
    {
        object[] arguments = new object[] { this, learnerMode };
        try
        {
            //JAVA TO C# CONVERTER WARNING: Java wildcard generics have no direct equivalent in .NET:
            //ORIGINAL LINE: Constructor<?> constructor = clazz.getConstructor(argTypes);
            // NOTE(review): converter artifacts below (generic ConstructorInfo,
            // Java exception types, newInstance) will not compile as-is — this
            // section still needs manual porting.
            System.Reflection.ConstructorInfo <object> constructor = clazz.GetConstructor(argTypes);
            method = (LearningMethod)constructor.newInstance(arguments);
        }
        catch (NoSuchMethodException e)
        {
            //JAVA TO C# CONVERTER WARNING: The .NET Type.FullName property will not always yield results identical to the Java Class.getName method:
            throw new GuideException("The learner class '" + clazz.FullName + "' cannot be initialized. ", e);
        }
        catch (InstantiationException e)
        {
            //JAVA TO C# CONVERTER WARNING: The .NET Type.FullName property will not always yield results identical to the Java Class.getName method:
            throw new GuideException("The learner class '" + clazz.FullName + "' cannot be initialized. ", e);
        }
        catch (IllegalAccessException e)
        {
            //JAVA TO C# CONVERTER WARNING: The .NET Type.FullName property will not always yield results identical to the Java Class.getName method:
            throw new GuideException("The learner class '" + clazz.FullName + "' cannot be initialized. ", e);
        }
        catch (InvocationTargetException e)
        {
            //JAVA TO C# CONVERTER WARNING: The .NET Type.FullName property will not always yield results identical to the Java Class.getName method:
            throw new GuideException("The learner class '" + clazz.FullName + "' cannot be initialized. ", e);
        }
    }
    // end init learning method
    // The master batch model records the learner description in the config file.
    if (learnerMode.Value == LearningMethod_Fields.BATCH && index == -1 && Guide.Configuration != null)
    {
        Guide.Configuration.writeInfoToConfigFile(method.ToString());
    }
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public void moveAllInstances(org.maltparser.ml.LearningMethod method, org.maltparser.core.feature.function.FeatureFunction divideFeature, java.util.ArrayList<int> divideFeatureIndexVector) throws org.maltparser.core.exception.MaltChainedException
// Intentionally a no-op in this implementation: this variant keeps no instance
// file, so there are no instances to move to another learning method.
public virtual void moveAllInstances(LearningMethod method, FeatureFunction divideFeature, List <int> divideFeatureIndexVector)
{
}
// Returns true when the supplied learning method is Madaline Rule II.
public static bool IsMRII(LearningMethod learningMethod) =>
    learningMethod == LearningMethod.MadalineRuleII;
// Returns true when the supplied learning method is the Delta rule.
public static bool IsDelta(LearningMethod learningMethod) =>
    learningMethod == LearningMethod.Delta;
// Returns true when the supplied learning method is offline backpropagation.
public static bool IsOffline(LearningMethod learningMethod) =>
    learningMethod == LearningMethod.BackpropagationOffline;
// Loads a NeuralNetwork from the semicolon-separated file format produced by
// the corresponding save routine: a header row, then bias/min/max/method/name
// rows, the per-layer neuron counts and activation-function ids, and finally
// one row of weights per neuron. Returns null (after logging) on any error.
// Fix: the StreamReader was never disposed, leaking the file handle; it is
// now wrapped in a using statement.
public static NeuralNetwork LoadNetworkFromFile(String filename)
{
    NeuralNetwork neuralNetwork = null;
    try
    {
        using (StreamReader sr = new StreamReader(filename))
        {
            String str = sr.ReadLine(); // headers
            String[] strTab;
            str = sr.ReadLine();
            strTab = str.Split(';');
            bool isNetworkUsingBias = bool.Parse(strTab[1]);
            str = sr.ReadLine();
            strTab = str.Split(';');
            // NOTE(review): double.Parse uses the current culture; files written on a
            // machine with a different decimal separator will fail — confirm format.
            double minWeight = double.Parse(strTab[1]);
            str = sr.ReadLine();
            strTab = str.Split(';');
            double maxWeight = double.Parse(strTab[1]);
            str = sr.ReadLine();
            strTab = str.Split(';');
            LearningMethod methodOfLearning = strTab[1] == "0" ? LearningMethod.LINEAR : LearningMethod.NOT_LINEAR;
            str = sr.ReadLine();
            strTab = str.Split(';');
            String name = strTab[1];
            String[] layersNeuronsStr = sr.ReadLine().Split(';'); // neurons per layer
            String[] layersActivationFunctionStr = sr.ReadLine().Split(';'); // activation function id per layer
            List <LayerCreationInfo> layerCreationInfos = new List <LayerCreationInfo>();
            // Column 0 is the row label, so layer data starts at index 1.
            for (int i = 1; i < layersNeuronsStr.Length; i++)
            {
                int layerIdx = i - 1;
                LayerCreationInfo lci = new LayerCreationInfo();
                lci.HowManyNeuronsPerLayer = int.Parse(layersNeuronsStr[i]);
                lci.LayerNo = layerIdx;
                lci.PreviousLayerNeuronsCount = layerIdx == 0 ?
                                                0 : layerCreationInfos[layerIdx - 1].HowManyNeuronsPerLayer;
                int LayerActivationFunctionInt = int.Parse(layersActivationFunctionStr[i]);
                lci.LayerActivationFunction = GetActivationFunctionById(LayerActivationFunctionInt);
                layerCreationInfos.Add(lci);
            }
            Topology topology = new Topology(layerCreationInfos, isNetworkUsingBias, minWeight, maxWeight);
            neuralNetwork = new NeuralNetwork(topology, minWeight, maxWeight, methodOfLearning, name);
            // Restore the saved weights, one row per neuron (column 0 is a label).
            for (int layerNo = 0; layerNo < neuralNetwork.Topology.Layers.Count; layerNo++)
            {
                Layer layer = neuralNetwork.Topology.Layers[layerNo];
                for (int neuronNo = 0; neuronNo < layer.Neurons.Count; neuronNo++)
                {
                    String[] inputsString = sr.ReadLine().Split(';');
                    for (int inputNo = 0; inputNo < layer.Neurons[neuronNo].Inputs.Count; inputNo++)
                    {
                        layer.Neurons[neuronNo].Inputs[inputNo].Weight = double.Parse(inputsString[inputNo + 1]);
                    }
                }
            }
            neuralNetwork.PropagateValuesForward(); // maybe not needed now, but for order it's good to get network in proper state.
            return(neuralNetwork);
        }
    }
    catch (Exception ex)
    {
        // Deliberate best-effort: any parse/IO failure is reported and null returned.
        Console.WriteLine("Error while loading network: " + ex.Message);
        return(null);
    }
}