public static double TrainNetworks(SupportVectorMachine network, MarketMLDataSet training)
{
    // train the support vector machine
    SVMTrain trainMain = new SVMTrain(network, training);
    StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);
    trainMain.AddStrategy(stop);

    var sw = new Stopwatch();
    sw.Start();
    while (!stop.ShouldStop())
    {
        trainMain.PreIteration();
        trainMain.Iteration();
        trainMain.PostIteration();
        Console.WriteLine(@"Iteration #: " + trainMain.IterationNumber + @" Error: " + trainMain.Error);
    }
    sw.Stop();

    Console.WriteLine(@"SVM trained in " + sw.ElapsedMilliseconds + @" ms, error: " + trainMain.Error + @", iterations: " + trainMain.IterationNumber);
    return trainMain.Error;
}
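A minimal sketch of how this helper might be called; it assumes a MarketMLDataSet named marketData that has already been loaded and generated elsewhere (the data-loading step is not part of the snippet above), and it reuses the SVM constructor seen in the other examples on this page:

var svm = new SupportVectorMachine(marketData.InputSize, SVMType.EpsilonSupportVectorRegression, KernelType.RadialBasisFunction);
double finalError = TrainNetworks(svm, marketData);
Console.WriteLine(@"Final error: " + finalError);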
public static double train(SupportVectorMachine network, IMLDataSet training)
{
    SVMTrain train = new SVMTrain(network, training);
    train.Iteration();
    return train.Error;
}
/// <summary>
/// Create an SVM trainer.
/// </summary>
/// <param name="method">The method to use.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    if (!(method is SupportVectorMachine))
    {
        throw new EncogError(
            "SVM training cannot be used on a method of type: "
            + method.GetType().FullName);
    }

    double defaultGamma = 1.0d / ((SupportVectorMachine)method).InputCount;
    double defaultC = 1.0d;

    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);
    double gamma = holder.GetDouble(MLTrainFactory.PropertyGamma, false, defaultGamma);
    double c = holder.GetDouble(MLTrainFactory.PropertyC, false, defaultC);

    var result = new SVMTrain((SupportVectorMachine)method, training)
    {
        Gamma = gamma,
        C = c
    };
    return result;
}
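A hedged usage sketch for the factory method above. The factory instance name (svmFactory) is an assumption, the XOR arrays are borrowed from the other snippets on this page, and the "GAMMA"/"C" keys are assumed to match MLTrainFactory.PropertyGamma and MLTrainFactory.PropertyC:

IMLDataSet xorSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
var svm = new SupportVectorMachine(2, SVMType.EpsilonSupportVectorRegression, KernelType.RadialBasisFunction);
IMLTrain trainer = svmFactory.Create(svm, xorSet, "GAMMA=0.5,C=2");
trainer.Iteration();
Console.WriteLine(@"Error after one iteration: " + trainer.Error);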
/// <summary>
/// Trains a random trainer.
/// </summary>
/// <param name="inputs">The inputs.</param>
/// <param name="predictWindow">The predict window.</param>
public static void RandomTrainerMethod(int inputs, int predictWindow)
{
    double[] firstInput = MakeInputs(inputs);
    double[] secondInput = MakeInputs(inputs);
    double[] thirdInput = MakeInputs(inputs);
    double[] fourthInput = MakeInputs(inputs);

    var pair = SuperUtilsTrainer.ProcessPairs(firstInput, fourthInput, inputs, predictWindow);
    var pair2 = SuperUtilsTrainer.ProcessPairs(secondInput, fourthInput, inputs, predictWindow);
    var pair3 = SuperUtilsTrainer.ProcessPairs(thirdInput, fourthInput, inputs, predictWindow);
    var pair4 = SuperUtilsTrainer.ProcessPairs(fourthInput, fourthInput, inputs, predictWindow);

    var superSet = new BasicMLDataSet();
    superSet.Add(pair);
    superSet.Add(pair2);
    superSet.Add(pair3);
    superSet.Add(pair4);

    SupportVectorMachine machine = Create(superSet, inputs);
    SVMTrain train = new SVMTrain(machine, superSet);

    // var network = (BasicNetwork)CreateEval.CreateElmanNetwork(superSet.InputSize, superSet.IdealSize);
    // double error = CreateEval.TrainNetworks(machine, superSet);

    TrainSVM(train, machine);

    // Let's create an evaluation.
    // Console.WriteLine(@"Last error rate on random trainer: " + error);
}
protected ProbabilitySupportVectorMachine TrainSVM(double C, double gamma, List<ISample> trainingSamples, Func<ISample, double> idealFunction)
{
    // duplicate the training dataset for better cross validation by the LIBSVM
    // probability generator (see the LIBSVM documentation)
    List<double[]> inputSamples = trainingSamples.Select(sample => sample.GetDimensions()).ToList();
    inputSamples.AddRange(trainingSamples.Select(sample => sample.GetDimensions()));

    // account for imposter samples (identifier not in identifierMap)
    List<double[]> outputSamples = trainingSamples.Select(sample => new double[] { idealFunction.Invoke(sample) }).ToList();
    outputSamples.AddRange(trainingSamples.Select(sample => new double[] { idealFunction.Invoke(sample) }));

    double[][] input = inputSamples.ToArray();
    double[][] ideal = outputSamples.ToArray();

    // train the SVM classifier with the provided data
    IMLDataSet trainingData = new BasicMLDataSet(input, ideal);
    ProbabilitySupportVectorMachine svmNetwork = new ProbabilitySupportVectorMachine(trainingSamples[0].GetDimensionCount(), false, 0.00000001);

    // train the SVM classifier with the provided C and gamma
    SVMTrain trainedSVM = new SVMTrain(svmNetwork, trainingData)
    {
        Fold = 0,
        Gamma = gamma,
        C = C
    };
    trainedSVM.Iteration();

    Console.WriteLine("SVM training error: " + trainedSVM.Error);
    return svmNetwork;
}
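A hypothetical call to the method above; samples and LabelFor are stand-in names (a populated sample list and a per-sample label lookup), not part of the original snippet:

ProbabilitySupportVectorMachine svm = TrainSVM(
    1.0,                        // C
    0.25,                       // gamma
    samples,                    // List<ISample> prepared elsewhere
    sample => LabelFor(sample)  // maps each sample to its ideal class value
);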
private static SupportVectorMachine Create(IMLDataSet theset, int inputs)
{
    IMLDataSet training = new BasicMLDataSet(theset);
    SupportVectorMachine result = new SupportVectorMachine(inputs, SVMType.EpsilonSupportVectorRegression, KernelType.Sigmoid);
    SVMTrain train = new SVMTrain(result, training);
    train.Iteration();
    return result;
}
private SupportVectorMachine Create()
{
    IMLDataSet training = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    SupportVectorMachine result = new SupportVectorMachine(2, SVMType.EpsilonSupportVectorRegression, KernelType.RadialBasisFunction);
    SVMTrain train = new SVMTrain(result, training);
    train.Iteration();
    return result;
}
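A small sketch of how the trained XOR regressor from Create() might be exercised afterwards; it assumes the usual Encog XOR arrays and queries the machine through Compute:

SupportVectorMachine svm = Create();
IMLData output = svm.Compute(new BasicMLData(XOR.XORInput[0]));
Console.WriteLine(@"Prediction for the first XOR row: " + output[0]);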
/// <summary>
/// Train the network to a specific error, sending the output to the console.
/// </summary>
/// <param name="method">The model to train.</param>
/// <param name="trainingSet">The training set to use.</param>
/// <param name="error">The error level to train to.</param>
public static void TrainToError(IMLMethod method, IMLDataSet trainingSet, double error)
{
    IMLTrain train;

    if (method is SupportVectorMachine)
    {
        train = new SVMTrain((SupportVectorMachine)method, trainingSet);
    }
    else if (method is FreeformNetwork)
    {
        train = new FreeformResilientPropagation((FreeformNetwork)method, trainingSet);
    }
    else
    {
        train = new ResilientPropagation((IContainsFlat)method, trainingSet);
    }

    TrainToError(train, error);
}
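A hedged example of calling this helper with the XOR data used elsewhere on this page; the 1% target error is an arbitrary illustration, not a value taken from the original code:

IMLDataSet xorSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
var svm = new SupportVectorMachine(2, SVMType.EpsilonSupportVectorRegression, KernelType.RadialBasisFunction);
TrainToError(svm, xorSet, 0.01);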
public static double TrainSVM(SVMTrain train, SupportVectorMachine machine)
{
    StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);
    train.AddStrategy(stop);

    var sw = new Stopwatch();
    sw.Start();
    while (!stop.ShouldStop())
    {
        train.PreIteration();
        train.Iteration();
        train.PostIteration();
        Console.WriteLine(@"Iteration #: " + train.IterationNumber + @" Error: " + train.Error + @" Gamma: " + train.Gamma);
    }
    sw.Stop();

    Console.WriteLine(@"SVM trained in " + sw.ElapsedMilliseconds + @" ms");
    return train.Error;
}
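As a rough sketch, the Create helper shown earlier and this TrainSVM loop could be wired together as below; it assumes an existing IMLDataSet named dataSet, and that both helpers live in the same class (RandomTrainerMethod above calls them directly):

SupportVectorMachine machine = Create(dataSet, dataSet.InputSize);
var train = new SVMTrain(machine, dataSet);
double error = TrainSVM(train, machine);
Console.WriteLine(@"Stopped at error: " + error);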
public static void train(SupportVectorMachine network, IMLDataSet training)
{
    SVMTrain train = new SVMTrain(network, training);
    train.Iteration();
}
public void TestSOM2()
{
    // create the training set
    IMLDataSet training = new BasicMLDataSet(SOMInput2, null);

    // create the neural network
    var network = new SOMNetwork(4, 4);
    var train = new BasicTrainSOM(network, 0.01, training, new NeighborhoodSingle())
    {
        ForceWinner = true
    };

    for (int iteration = 0; iteration <= 1000; iteration++)
    {
        train.Iteration();
    }

    IMLData data1 = new BasicMLData(SOMInput2[2]);
    IMLData data2 = new BasicMLData(SOMInput2[0]);
    IMLData data3 = new BasicMLData(SOMInput2[1]);
    IMLData data4 = new BasicMLData(SOMInput2[3]);

    int result1 = network.Classify(data1);
    int result2 = network.Classify(data2);
    int result3 = network.Classify(data3);
    int result4 = network.Classify(data4);

    Console.WriteLine("Winner in SOMInput2: " + network.Winner(new BasicMLData(SOMInput2[0])));
    Console.WriteLine("First: " + result1);
    Console.WriteLine("Second: " + result2);
    Console.WriteLine("Third: " + result3);
    Console.WriteLine("Fourth: " + result4);
    Assert.IsTrue(result1 != result2);

    train.TrainPattern(new BasicMLData(SOMInput2[2]));
    Console.WriteLine("After training pattern: " + network.Winner(new BasicMLData(SOMInput2[1])));

    var result = new SupportVectorMachine(4, SVMType.SupportVectorClassification, KernelType.Sigmoid);
    training = new BasicMLDataSet(SOMInput2, SOMInput2);
    SVMTrain trainsvm = new SVMTrain(result, training);
    trainsvm.Iteration(50);

    result1 = result.Classify(data1);
    result2 = result.Classify(data2);
    result3 = result.Classify(data3);
    result4 = result.Classify(data4);
    Console.WriteLine("SVM classification: EURUSD 1: " + result1 + " GBPUSD: " + result2 + " EURCHF: " + result3 + " EURJPY: " + result4);
}