/// <summary>
///   Initializes a new instance of the <see cref="BaseSupportVectorRegression"/> class.
/// </summary>
///
/// <param name="machine">The machine to be learned.</param>
/// <param name="inputs">The input data.</param>
/// <param name="outputs">The corresponding output data.</param>
///
protected BaseSupportVectorRegression(SupportVectorMachine machine, double[][] inputs, double[] outputs)
{
    // Initial argument checking
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    // Machine
    this.machine = machine;

    // Kernel (if applicable)
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;

    if (ksvm == null)
    {
        isLinear = true;
        Linear linear = new Linear(0);
        kernel = linear;
    }
    else
    {
        Linear linear = ksvm.Kernel as Linear;
        isLinear = linear != null && linear.Constant == 0;
        kernel = ksvm.Kernel;
    }

    // Learning data
    this.inputs = inputs;
    this.outputs = outputs;
}
/// <summary>
///   Constructs a new one-class support vector learning algorithm.
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
///
public OneclassSupportVectorLearning(SupportVectorMachine machine, double[][] inputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    this.inputs = inputs;
    this.machine = machine;

    this.zeros = new double[inputs.Length];
    this.ones = new int[inputs.Length];
    this.alpha = new double[inputs.Length];

    for (int i = 0; i < alpha.Length; i++)
        alpha[i] = 1;

    for (int i = 0; i < ones.Length; i++)
        ones[i] = 1;

    // Kernel (if applicable)
    var ksvm = machine as KernelSupportVectorMachine;
    if (ksvm == null)
        kernel = new Linear(0);
    else
        kernel = ksvm.Kernel;
}
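// A minimal, hedged usage sketch for the constructor above. It follows the
// Run()-based training convention used by the other old-style teachers in this
// section (e.g. LinearDualCoordinateDescent in LearnTest). The Gaussian kernel,
// the sample data and the Nu property (assumed to control the expected outlier
// fraction) are illustrative assumptions, not part of the original code.
public static void OneclassUsageSketch()
{
    double[][] inputs =
    {
        new double[] { 0.0, 0.1 },
        new double[] { 0.1, 0.0 },
        new double[] { 0.2, 0.1 }
    };

    // One-class learning is usually kernelized, so we pass a KernelSupportVectorMachine;
    // with a plain SupportVectorMachine the constructor above falls back to Linear(0).
    var machine = new KernelSupportVectorMachine(new Gaussian(0.5), inputs: 2);
    var teacher = new OneclassSupportVectorLearning(machine, inputs) { Nu = 0.1 };

    double error = teacher.Run();
    Console.WriteLine("one-class error: " + error);
}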
public static void CheckArgs(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new DimensionMismatchException("outputs",
            "The number of input vectors and output labels does not match.");

    checkInputs(machine, inputs);

    for (int i = 0; i < outputs.Length; i++)
    {
        if (outputs[i] != 1 && outputs[i] != -1)
            throw new ArgumentOutOfRangeException("outputs",
                "The output label at index " + i + " should be either +1 or -1.");
    }
}
public static void CheckArgs(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new DimensionMismatchException("outputs",
            "The number of input vectors and output labels does not match.");

    if (inputs.Length == 0)
        throw new ArgumentOutOfRangeException("inputs",
            "Training algorithm needs at least one training vector.");

    if (machine.Inputs > 0)
    {
        // This machine has a fixed input vector size
        for (int i = 0; i < inputs.Length; i++)
        {
            if (inputs[i] == null)
                throw new ArgumentNullException("inputs",
                    "The input vector at index " + i + " is null.");

            if (inputs[i].Length != machine.Inputs)
                throw new DimensionMismatchException("inputs",
                    "The size of the input vector at index " + i
                    + " does not match the expected number of inputs of the machine."
                    + " All input vectors for this machine must have length " + machine.Inputs);

            for (int j = 0; j < inputs[i].Length; j++)
            {
                if (Double.IsNaN(inputs[i][j]))
                    throw new ArgumentException("The input vector at index " + i + " contains NaN values.");

                if (Double.IsInfinity(inputs[i][j]))
                    throw new ArgumentException("The input vector at index " + i + " contains infinity values.");
            }
        }
    }

    for (int i = 0; i < outputs.Length; i++)
    {
        if (outputs[i] != 1 && outputs[i] != -1)
            throw new ArgumentOutOfRangeException("outputs",
                "The output label at index " + i + " should be either +1 or -1.");
    }
}
/// <summary>
///   Constructs a new coordinate descent algorithm for L1-loss and L2-loss SVM dual problems.
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public LinearCoordinateDescent(SupportVectorMachine machine, double[][] inputs, int[] outputs)
    : base(machine, inputs, outputs)
{
    int samples = inputs.Length;
    int dimension = inputs[0].Length;

    if (!IsLinear)
        throw new ArgumentException("Only linear machines are supported.", "machine");

    // Lagrange multipliers
    this.alpha = new double[samples];
    this.weights = new double[dimension];
}
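// A hedged usage sketch for the constructor above. A plain (linear)
// SupportVectorMachine satisfies the IsLinear requirement; the sample data and
// the Complexity value are illustrative assumptions, while Run() and the
// Complexity property follow the teachers shown elsewhere in this section
// (e.g. KernelTest1 and LearnTest).
public static void LinearCoordinateDescentUsageSketch()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };
    int[] outputs = { -1, -1, -1, 1 };   // a linearly separable AND-like problem

    var machine = new SupportVectorMachine(inputs: 2);
    var teacher = new LinearCoordinateDescent(machine, inputs, outputs)
    {
        Complexity = 1.0   // assumed regularization setting
    };

    double error = teacher.Run();
    int prediction = Math.Sign(machine.Compute(new double[] { 1, 1 }));   // expected: +1
}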
public static void predict(SupportVectorMachine network, SupportVectorMachine network2)
{
    Console.WriteLine("Year\tActual\tNormalized\tPredict1\tPredict2\tClosedLoop1\tClosedLoop2");

    for (int year = EVALUATE_START; year < EVALUATE_END; year++)
    {
        // Calculate based on actual data
        var input = new BasicMLData(WINDOW_SIZE);
        for (int i = 0; i < input.Count; i++)
            input.Data[i] = normalizedSunspots[(year - WINDOW_SIZE) + i];

        IMLData output = network.Compute(input);
        IMLData output2 = network2.Compute(input);
        double prediction = output[0];
        double prediction2 = output2[0];
        closedLoopSunspots[year] = prediction;

        // Calculate "closed loop", based on predicted data
        for (int i = 0; i < input.Count; i++)
            input.Data[i] = closedLoopSunspots[(year - WINDOW_SIZE) + i];

        output = network.Compute(input);
        double closedLoopPrediction = output[0];
        IMLData output3 = network2.Compute(input);
        double closedLoopPrediction2 = output3[0];   // was output[0]: read the wrong network's result

        // Display (use "\t" in a regular string; the original verbatim @"\t" printed a literal backslash-t)
        Console.WriteLine((STARTING_YEAR + year)
            + "\t" + Format.FormatDouble(SUNSPOTS[year], 4)
            + "\t" + Format.FormatDouble(normalizedSunspots[year], 4)
            + "\t" + Format.FormatDouble(prediction, 4)
            + "\t" + Format.FormatDouble(prediction2, 4)
            + "\t" + Format.FormatDouble(closedLoopPrediction, 4)
            + "\t" + Format.FormatDouble(closedLoopPrediction2, 4));
    }
}
public static SupportVectorMachine SVMSearch(SupportVectorMachine anetwork, IMLDataSet training)
{
    SVMSearchTrain bestsearch = new SVMSearchTrain(anetwork, training);
    StopTrainingStrategy stop = new StopTrainingStrategy(0.00000000001, 1);
    bestsearch.AddStrategy(stop);

    while (bestsearch.IterationNumber < 30 && !stop.ShouldStop())
    {
        bestsearch.Iteration();
        Console.WriteLine("Iteration #" + bestsearch.IterationNumber + " Error: " + bestsearch.Error);
    }

    bestsearch.FinishTraining();
    return anetwork;
}
public ICU()
{
    InitializeComponent();
    current_frame_num1 = 0;
    current_frame_num2 = 0;
    F_E = new FeaturesExtraction();

    // Load the pre-trained classifiers from disk
    knn = Serializer.Load<KNearestNeighbors>(Path.Combine(path, "knn7.bin"));
    RF = Serializer.Load<RandomForest>(Path.Combine(path, "RF7.bin"));
    LR = Serializer.Load<LogisticRegression>(Path.Combine(path, "LR7.bin"));
    SVM = Serializer.Load<SupportVectorMachine<Gaussian>>(Path.Combine(path, "SVM7.bin"));
    NB = Serializer.Load<NaiveBayes>(Path.Combine(path, "NB7.bin"));
    HMM = Serializer.Load<HiddenMarkovModel>(Path.Combine(path, "HMM_seq7.bin"));

    dataGridView1.RowTemplate.Height = 120;
    ((DataGridViewImageColumn)dataGridView1.Columns[0]).ImageLayout = DataGridViewImageCellLayout.Stretch;
    dataGridView1.Columns[1].Visible = false;
}
public double Learn(double[][] observations, int[] labels)
{
    var learn = new SequentialMinimalOptimization<NormalizedPolynomial>()
    {
        UseKernelEstimation = true,
        Kernel = new NormalizedPolynomial(1)
    };

    machine = learn.Learn(observations, labels);

    bool[] output = machine.Decide(observations);
    int[] zeroOneAnswers = output.ToZeroOne();
    double ratio = 1 - new AccuracyLoss(labels).Loss(zeroOneAnswers);
    return ratio;
}
/// <summary>
///   Serializes the given Gaussian-kernel SVM to the specified path.
/// </summary>
/// <param name="svm">The SVM to serialize.</param>
/// <param name="path">The destination file path.</param>
public static void SerializeSVM(SupportVectorMachine<Gaussian> svm, string path)
{
    SVMGaussianData data = new SVMGaussianData();
    data.Initialize(
        svm.NumberOfInputs,
        svm.NumberOfOutputs,
        svm.SupportVectors,
        svm.Threshold,
        svm.Weights,
        svm.Kernel.Gamma,
        svm.Kernel.Sigma,
        svm.Kernel.SigmaSquared);
    Serialize(data, path);
}
public void Learn(IList<XYtoZ> dsLearn)
{
    double[][] inputs = dsLearn.Select(i => new double[] { i.X, i.Y }).ToArray();
    double[] outputs = dsLearn.Select(i => i.Z).ToArray();

    var fclsvr = new FanChenLinSupportVectorRegression<Gaussian>()
    {
        Tolerance = _tolerance,
        UseKernelEstimation = _useKernelEstimation,
        UseComplexityHeuristic = _useComplexityHeuristic,
        Complexity = _complexity,
        Kernel = new Gaussian()
    };

    _supportVectorMachine = fclsvr.Learn(inputs, outputs);
}
void Update()
{
    timeLap = timeLap + Time.deltaTime;
    GameObject[] PODs = GameObject.FindGameObjectsWithTag("SVMPlayer");

    if (PODs.Length == 0)
    {
        timesRessurect = timesRessurect + 1;
        decisionThrust = DecisionThrust(InpThrust, OutThrust);
        decisionSteer = DecisionSteer(InpSteer, OutSteer);
        CarInstantiate();

        dataSizeSt = 1;
        dataSizeTh = 1;

        InpThrust = new double[dataSizeTh][];
        InpThrust[0] = new double[4];
        OutThrust = new int[1];
        OutThrust[0] = 1;

        InpSteer = new double[dataSizeSt][];
        InpSteer[0] = new double[5];
        OutSteer = new int[1];
        OutSteer[0] = 0;

        for (int i = 0; i < 4; i++)
            InpThrust[0][i] = i;

        for (int i = 0; i < 5; i++)
            InpSteer[0][i] = i;
    }

    if (timeScale != timeScaleAnt)
    {
        if (PODs.Length != 0)
        {
            for (int i = 0; i < PODs.Length; i++)
                PODs[i].GetComponent<SVMPlayerController>().SetTimeScale(timeScale);
        }
        timeScaleAnt = timeScale;
    }
}
public void TransformTest()
{
    var inputs = yinyang.Submatrix(null, 0, 1).ToJagged();
    var labels = yinyang.GetColumn(2).ToInt32();

    ConfusionMatrix actual, expected;
    SequentialMinimalOptimization a, b;

    var kernel = new Polynomial(2, 0);

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        a = new SequentialMinimalOptimization(machine, inputs, labels);
        a.UseComplexityHeuristic = true;
        a.Run();

        int[] values = new int[labels.Length];
        for (int i = 0; i < values.Length; i++)
            values[i] = Math.Sign(machine.Compute(inputs[i]));

        expected = new ConfusionMatrix(values, labels);
    }

    {
        var projection = inputs.Apply(kernel.Transform);
        var machine = new SupportVectorMachine(projection[0].Length);
        b = new SequentialMinimalOptimization(machine, projection, labels);
        b.UseComplexityHeuristic = true;
        b.Run();

        int[] values = new int[labels.Length];
        for (int i = 0; i < values.Length; i++)
            values[i] = Math.Sign(machine.Compute(projection[i]));

        actual = new ConfusionMatrix(values, labels);
    }

    Assert.AreEqual(a.Complexity, b.Complexity, 1e-15);
    Assert.AreEqual(expected.TrueNegatives, actual.TrueNegatives);
    Assert.AreEqual(expected.TruePositives, actual.TruePositives);
    Assert.AreEqual(expected.FalseNegatives, actual.FalseNegatives);
    Assert.AreEqual(expected.FalsePositives, actual.FalsePositives);
}
/// <inheritdoc />
public override void Train()
{
    var inputs = data.GetSelectedInput(features);
    var outputs = data.GetExpectedClassificationOutput();

    var teacher = new LeastSquaresLearning<Gaussian, double[]>()
    {
        Kernel = new Gaussian(),
        UseComplexityHeuristic = true,
        WeightRatio = 2.0,
        UseKernelEstimation = true,
    };

    svm = teacher.Learn(inputs, outputs);
    Save();
}
/// <summary>
///   Construct a trainer for an SVM network.
/// </summary>
///
/// <param name="method">The method to train.</param>
/// <param name="training">The training data for this network.</param>
public SVMSearchTrain(SupportVectorMachine method, IMLDataSet training)
    : base(TrainingImplementationType.Iterative)
{
    _fold = 0;
    _constBegin = DefaultConstBegin;
    _constStep = DefaultConstStep;
    _constEnd = DefaultConstEnd;
    _gammaBegin = DefaultGammaBegin;
    _gammaEnd = DefaultGammaEnd;
    _gammaStep = DefaultGammaStep;
    _network = method;
    Training = training;
    _isSetup = false;
    _trainingDone = false;

    _internalTrain = new SVMTrain(_network, training);
}
public void Train()
{
    var inputsOutputs = unitOfWork.Matches.GetBioMeasuresForTraining();

    var inputs = CastListOfBioMeasuresToListOfDoubles(inputsOutputs);
    var outputs = inputsOutputs
        .Select(match => Convert.ToBoolean((int)match.FirstOrDefault().Match.MatchResult))
        .ToArray();

    var smo = new SequentialMinimalOptimization<Gaussian>()
    {
        Complexity = 100
    };

    svm = smo.Learn(inputs, outputs);
    bool[] prediction = svm.Decide(inputs);
}
public double Learn(double[][] observations, int[] labels)
{
    var learn = new SequentialMinimalOptimization<Gaussian>()
    {
        UseComplexityHeuristic = true,
        Kernel = new Gaussian(1.2)
    };

    machine = learn.Learn(observations, labels);

    bool[] output = machine.Decide(observations);
    int[] zeroOneAnswers = output.ToZeroOne();
    double ratio = 1 - new AccuracyLoss(labels).Loss(zeroOneAnswers);
    return ratio;
}
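// A hedged end-to-end sketch of the Learn/Decide API used by the wrapper above.
// The XOR data is illustrative; UseComplexityHeuristic, the Gaussian kernel and
// ToZeroOne mirror settings that already appear in this section.
public static void SmoGaussianUsageSketch()
{
    double[][] observations =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    int[] labels = { 0, 1, 1, 0 };   // XOR: not linearly separable, so a kernel is needed

    var learn = new SequentialMinimalOptimization<Gaussian>()
    {
        UseComplexityHeuristic = true,
        Kernel = new Gaussian(1.2)
    };

    var machine = learn.Learn(observations, labels);

    // Decide returns booleans; ToZeroOne converts them back to {0,1} labels
    bool[] decisions = machine.Decide(observations);
    int[] predicted = decisions.ToZeroOne();
}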
public void Train(List<TrainingValue> trainingData)
{
    List<DecisionVariable> trainingVariables = new List<DecisionVariable>();
    for (int i = 0; i < featureSize; i++)
        trainingVariables.Add(DecisionVariable.Continuous(i.ToString()));

    tree = new DecisionTree(inputs: trainingVariables, classes: 2);

    double[][] featuresArray = new double[trainingData.Count][];
    int[] labels = new int[trainingData.Count];
    for (int i = 0; i < featuresArray.Length; i++)
    {
        featuresArray[i] = trainingData[i].Features;
        labels[i] = Convert.ToInt32(trainingData[i].State);
    }

    switch (type)
    {
        case ClassifierType.DecisionTree:
            C45Learning teacher = new C45Learning(tree);
            teacher.Learn(featuresArray, labels);
            break;

        case ClassifierType.LDA:
            LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis();
            pipeline = lda.Learn(featuresArray, labels);
            break;

        case ClassifierType.SVM:
            LinearCoordinateDescent svmLearner = new LinearCoordinateDescent();
            svm = svmLearner.Learn(featuresArray, labels);
            break;

        case ClassifierType.Bayes:
            NaiveBayesLearning<NormalDistribution> learner = new NaiveBayesLearning<NormalDistribution>();
            bayes = learner.Learn(featuresArray, labels);
            break;
    }

    Trained = true;
}
private void btnSampleRunAnalysis_Click(object sender, EventArgs e)
{
    if (!isTrainingDataLoaded)
    {
        MessageBox.Show("Please load your training data first.");
        return;
    }

    // Creates a matrix from the entire source data table
    double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

    // Get only the input vector values (in the first two columns)
    double[][] inputs = ConvertDataTableToMatrix(TrainingData.Tables["InterestedTrainingDataValues"]);

    // Get only the output labels (last column)
    int[] outputs = table.GetColumn(2).ToInt32();

    // Creates a new instance of the SMO learning algorithm
    var smo = new SequentialMinimalOptimization<IKernel>()
    {
        // Set learning parameters
        Complexity = (double)numC.Value,
        Tolerance = (double)numT.Value,
        PositiveWeight = (double)numPositiveWeight.Value,
        NegativeWeight = (double)numNegativeWeight.Value,
        Kernel = createKernel()
    };

    try
    {
        // Run
        svm = smo.Learn(inputs, outputs);
    }
    catch (ConvergenceException)
    {
        MessageBox.Show("Convergence could not be attained. The learned machine might still be usable.");
    }

    createSurface(table);
    MessageBox.Show("Training complete.");
}
public void SerializeTest()
{
    SupportVectorMachine machine1 = new SupportVectorMachine(
        new ChiSquare(),
        new float[][]
        {
            new float[] { 1, 2 },
            new float[] { 3, 4 },
            new float[] { 5, 6 },
        },
        new float[] { 0.1f, 0.2f, 0.3f },
        0.4f);

    string s1 = JsonConvert.SerializeObject(machine1);
    SupportVectorMachine machine2 = JsonConvert.DeserializeObject<SupportVectorMachine>(s1);
    string s2 = JsonConvert.SerializeObject(machine2);

    Assert.AreEqual(s1, s2);
}
public void BuildSVM(List<train> datalist)
{
    double[][] inputs;
    int[] outputs;
    GetData(out inputs, out outputs, datalist);

    // Now, we can create the sequential minimal optimization teacher
    var learn = new SequentialMinimalOptimization()
    {
        UseComplexityHeuristic = true,
        UseKernelEstimation = false
    };

    // And then we can obtain a trained SVM by calling its Learn method
    svm = learn.Learn(inputs, outputs);
    log("SVM model has been trained");
}
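// A hedged follow-up to BuildSVM above: once the svm field has been learned,
// a new sample can be classified with Decide, as the other new-API snippets in
// this section do. ClassifySample is a hypothetical helper name; the sample's
// length must match the training inputs.
public bool ClassifySample(double[] sample)
{
    // true/false corresponds to the positive/negative class of the trained machine
    return svm.Decide(sample);
}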
public static void CheckArgs(SupportVectorMachine machine, double[][] inputs, double[] outputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new DimensionMismatchException("outputs",
            "The number of input vectors and output values does not match.");

    checkInputs(machine, inputs);
}
private void btTrainingProses_Click(object sender, EventArgs e)
{
    if (isTrainingLoaded is true)
    {
        var learn = new SequentialMinimalOptimization<Gaussian>()
        {
            UseComplexityHeuristic = true,
            UseKernelEstimation = true
        };

        this.svm = learn.Learn(fitur4training, trainingClass);

        //Save_Result("../training", DateTime.Now.ToString("yyyyMMdd#HHmmss"));
        Save_Result(svm_model_collection_path, DateTime.Now.ToString("yyyyMMdd#HHmmss"));
        isLearningDone = true;
        webBrowser1.Refresh();
    }
    else if (isLearningDone is true && isTrainingLoaded is false)
    {
        MessageBox.Show("Training Success");
    }
}
private SupportVectorMachine<Linear> getSVMRegression(GeoWave geoWave, int labelIdx, bool[] Dim2TakeNode, ref double[] svmApprox)
{
    SupportVectorMachine<Linear> svmRegression = null;

    double[][] dataForRegression = new double[geoWave.pointsIdArray.Count][];
    double[] labelForRegression = new double[geoWave.pointsIdArray.Count];
    int amountOfFeatures = training_dt[0].Length;

    for (int i = 0; i < geoWave.pointsIdArray.Count; i++)
    {
        int index = geoWave.pointsIdArray[i];
        dataForRegression[i] = new double[userConfig.nFeatures];
        int k = 0;
        for (int j = 0; j < amountOfFeatures; j++)
        {
            if (Dim2TakeNode[j])
            {
                dataForRegression[i][k] = training_dt[index][j];
                k++;
            }
        }
        labelForRegression[i] = training_label[index][labelIdx];
    }

    LinearRegressionNewtonMethod tmpSvmRegression = new LinearRegressionNewtonMethod()
    {
        UseComplexityHeuristic = true
    };

    try
    {
        svmRegression = tmpSvmRegression.Learn(dataForRegression, labelForRegression);
        svmApprox = svmRegression.Score(dataForRegression);
    }
    catch (Exception)
    {
        return null;
    }

    if (svmApprox.Contains(double.NaN))
        return null;

    return svmRegression;
}
private void doSVMRegressionSplit(GeoWave currentWave, Dictionary<SplitType, SplitData> splitsData, double currentError, bool[] Dim2TakeNode)
{
    double lowestError = currentError;
    double[] bestSvmApprox = new double[training_dt.Length];

    for (int labelIdx = 0; labelIdx < training_label[0].Length; labelIdx++)
    {
        double[] svmApprox = new double[currentWave.pointsIdArray.Count];
        SupportVectorMachine<Linear> svmRegression = getSVMRegression(currentWave, labelIdx, Dim2TakeNode, ref svmApprox);

        if (null != svmRegression)
        {
            double[] tmpSvmApprox = new double[training_dt.Length];
            for (int i = 0; i < currentWave.pointsIdArray.Count; i++)
            {
                int index = currentWave.pointsIdArray[i];
                tmpSvmApprox[index] = svmApprox[i];
            }

            double svmSplitValue = 0;
            double error = getBestSVMRegressionSplit(currentWave, tmpSvmApprox, ref svmSplitValue);

            if (error < lowestError)
            {
                lowestError = error;
                currentWave.svmRegressionSplitsParameters.svmRegression = svmRegression;
                currentWave.svmRegressionSplitsParameters.labelIdx = labelIdx;
                currentWave.svmRegressionSplitsParameters.svmRegressionSplitValue = svmSplitValue;
                currentWave.svmRegressionSplitsParameters.Dim2TakeNode = Dim2TakeNode;
                bestSvmApprox = tmpSvmApprox;
            }
        }
    }

    if (lowestError >= currentError)
        return;

    GeoWave child0 = new GeoWave(currentWave.isotropicSplitsParameters.boundingBox, training_label[0].Count());
    GeoWave child1 = new GeoWave(currentWave.isotropicSplitsParameters.boundingBox, training_label[0].Count());

    setChildrensPointsAndMeanValueSVMRegression(child0, child1, currentWave, bestSvmApprox);
    splitsData.Add(SplitType.SVM_REGRESSION_SPLITS, new SplitData(lowestError, child0, child1));
}
public TimeSeries Forecast(SupportVectorMachine network, NormalizeArray norm, TimeSeries simulatedData, List<DateTime> futureTimes)
{
    int data_count = simulatedData.Count;
    int future_data_count = futureTimes.Count;

    double[] data = new double[data_count + future_data_count];
    for (int idx = 0; idx < data_count; ++idx)
        data[idx] = simulatedData[idx];
    for (int idx = 0; idx < future_data_count; ++idx)
        data[data_count + idx] = 0;

    TimeSeries ts = new TimeSeries();
    double input_val = 0;

    for (int idx = 0; idx < future_data_count; ++idx)
    {
        var input = new BasicMLData(WindowSize);
        for (var i = 0; i < WindowSize; i++)
        {
            int idx2 = (data_count + idx - WindowSize) + i;
            if (idx2 < 0)
                input_val = 0;
            else
                input_val = norm.Stats.Normalize(data[idx2]);
            input[i] = input_val;
        }

        IMLData output = network.Compute(input);
        double prediction = norm.Stats.DeNormalize(output[0]);
        data[data_count + idx] = prediction;
        ts.Add(futureTimes[idx], prediction, false);
    }

    return ts;
}
public void Optimizer_ObjectiveFunction(double[] solution, ref double fitnessValue)
{
    Console.WriteLine(Optimizer.CurrentIteration);

    // Set kernel parameters
    kernelG.Sigma = solution[0];

    // Set parameters for the regression learning algorithm
    teacherSMOR.Complexity = solution[1];
    teacherSMOR.Tolerance = solution[2];
    teacherSMOR.Epsilon = solution[3];

    // Use the teacher to create a machine
    svm = teacherSMOR.Learn(LearningInputs, LearningOutputs);

    // Check if we got support vectors
    if (svm.SupportVectors.Length == 0)
    {
        Console.WriteLine("Sorry, no support vectors.");
        return;
    }

    // Compute results for learning and testing data
    _Computed_LearningOutputs = svm.Score(LearningInputs);
    _Computed_TestingOutputs = svm.Score(TestingInputs);

    // Compute statistical indices
    LearningIndex = Statistics.Compute_RMSE(LearningOutputs, _Computed_LearningOutputs);
    TestingIndex = Statistics.Compute_RMSE(TestingOutputs, _Computed_TestingOutputs);

    // Compute the correlation coefficient R for learning and testing to check the results
    var Rlearn = Statistics.Compute_CorrelationCoeff_R(LearningOutputs, _Computed_LearningOutputs);
    var Rtest = Statistics.Compute_CorrelationCoeff_R(TestingOutputs, _Computed_TestingOutputs);

    Console.WriteLine("Index (learn) = {0} | Index (test) = {1} ; Correlation: R (learn) = {2} | R (test) = {3}",
        LearningIndex, TestingIndex, Rlearn, Rtest);

    if (BestLearningScore < LearningIndex && BestTestingScore < TestingIndex)
    {
        BestLearningScore = LearningIndex;
        BestTestingScore = TestingIndex;
    }

    // Set the fitness value
    fitnessValue = Math.Pow(LearningIndex, 2) + Math.Pow(TestingIndex, 2);
}
public static void Generate(string fileName)
{
    FileInfo dataDir = new FileInfo(Environment.CurrentDirectory);
    IMarketLoader loader = new CSVFinal();
    var market = new MarketMLDataSet(loader, CONFIG.INPUT_WINDOW, CONFIG.PREDICT_WINDOW);
    var desc = new MarketDataDescription(CONFIG.TICKER, MarketDataType.Close, true, true);
    market.AddDescription(desc);

    loader.GetFile(fileName);

    // Gather 200 days of training data, beginning 600 days ago
    var end = DateTime.Now;
    var begin = new DateTime(end.Ticks);
    begin = begin.AddDays(-600);
    end = begin.AddDays(200);
    Console.WriteLine("You are loading data from: " + begin.ToShortDateString() + " to: " + end.ToShortDateString());

    market.Load(begin, end);
    market.Generate();
    EncogUtility.SaveEGB(FileUtil.CombinePath(dataDir, CONFIG.SVMTRAINING_FILE), market);

    // Create the SVM network
    SupportVectorMachine network = new SupportVectorMachine(CONFIG.INPUT_WINDOW, true);
    TrainNetworks(network, market);

    // Save the network and the training
    EncogDirectoryPersistence.SaveObject(FileUtil.CombinePath(dataDir, CONFIG.SVMTRAINING_FILE), network);
}
public void KernelTest1()
{
    var dataset = SequentialMinimalOptimizationTest.GetYingYang();
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    double e1, e2;
    double[] w1, w2;

    {
        Accord.Math.Random.Generator.Seed = 0;
        var svm = new SupportVectorMachine(inputs: 2);
        var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);
        teacher.Tolerance = 1e-10;
        teacher.Complexity = 1e+10;
        e1 = teacher.Run();
        w1 = svm.ToWeights();
    }

    {
        Accord.Math.Random.Generator.Seed = 0;
        var svm = new KernelSupportVectorMachine(new Linear(0), inputs: 2);
        var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);
        teacher.Tolerance = 1e-10;
        teacher.Complexity = 1e+10;
        e2 = teacher.Run();
        w2 = svm.ToWeights();
    }

    Assert.AreEqual(e1, e2);
    Assert.AreEqual(w1.Length, w2.Length);
    Assert.AreEqual(w1[0], w2[0], 1e-8);
    Assert.AreEqual(w1[1], w2[1], 1e-8);
    Assert.AreEqual(w1[2], w2[2], 1e-8);
}
private static void checkInputs(SupportVectorMachine machine, double[][] inputs)
{
    if (inputs.Length == 0)
        throw new ArgumentOutOfRangeException("inputs",
            "Training algorithm needs at least one training vector.");

    if (machine.Inputs > 0)
    {
        // This machine has a fixed input vector size
        for (int i = 0; i < inputs.Length; i++)
        {
            if (inputs[i] == null)
                throw new ArgumentNullException("inputs",
                    "The input vector at index " + i + " is null.");

            if (inputs[i].Length != machine.Inputs)
                throw new DimensionMismatchException("inputs",
                    "The size of the input vector at index " + i
                    + " does not match the expected number of inputs of the machine."
                    + " All input vectors for this machine must have length " + machine.Inputs);

            for (int j = 0; j < inputs[i].Length; j++)
            {
                if (Double.IsNaN(inputs[i][j]))
                    throw new ArgumentException("The input vector at index " + i + " contains NaN values.");

                if (Double.IsInfinity(inputs[i][j]))
                    throw new ArgumentException("The input vector at index " + i + " contains infinity values.");
            }
        }
    }
}
public void LearnTest()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = { -1, 1, 1, -1 };

    var kernel = new Polynomial(2, 0.0);

    // Project the data through the kernel's explicit feature map
    double[][] augmented = new double[inputs.Length][];
    for (int i = 0; i < inputs.Length; i++)
        augmented[i] = kernel.Transform(inputs[i]);

    SupportVectorMachine machine = new SupportVectorMachine(augmented[0].Length);

    // Create the linear dual coordinate descent teacher
    var learn = new LinearDualCoordinateDescent(machine, augmented, xor);

    // Run the learning algorithm
    double error = learn.Run();
    Assert.AreEqual(0, error);

    int[] output = augmented.Apply(p => Math.Sign(machine.Compute(p)));
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
}
public double Learn(double[][] observations, int[] labels)
{
    // An alternative linear teacher:
    //var learn = new LinearDualCoordinateDescent()
    //{
    //    Loss = Loss.L2,
    //    Complexity = 1000,
    //    Tolerance = 1e-5
    //};

    SequentialMinimalOptimization learn = new SequentialMinimalOptimization()
    {
        UseComplexityHeuristic = true,
        UseKernelEstimation = false
    };

    machine = learn.Learn(observations, labels);

    bool[] output = machine.Decide(observations);
    int[] zeroOneAnswers = output.ToZeroOne();
    return 1 - new AccuracyLoss(labels).Loss(zeroOneAnswers);
}
/// <summary>
///   Constructs a new Least Squares SVM (LS-SVM) learning algorithm.
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public LeastSquaresLearning(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    // Set the machine
    this.machine = machine;

    // Grab the machine kernel
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;
    this.kernel = (ksvm == null) ? new Linear() : ksvm.Kernel;

    // Kernel cache
    this.cacheSize = inputs.Length;

    // Get learning data
    this.inputs = inputs;
    this.outputs = outputs;
    this.ones = Matrix.Vector(outputs.Length, 1);
}
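// A hedged usage sketch for the LS-SVM constructor above, following the same
// Run()-based convention as the other old-style teachers in this section. The
// XOR data and polynomial kernel mirror LearnTest; the Complexity value is an
// assumed regularization setting.
public static void LeastSquaresUsageSketch()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };
    int[] outputs = { -1, 1, 1, -1 };   // XOR, hence the polynomial kernel below

    var machine = new KernelSupportVectorMachine(new Polynomial(2, 0), inputs: 2);
    var teacher = new LeastSquaresLearning(machine, inputs, outputs)
    {
        Complexity = 1.0   // assumed property
    };

    double error = teacher.Run();
    int label = Math.Sign(machine.Compute(new double[] { -1, 1 }));   // expected: +1
}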
public static double TrainSVM(SVMTrain train, SupportVectorMachine machine)
{
    StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);
    train.AddStrategy(stop);

    var sw = new Stopwatch();
    sw.Start();

    while (!stop.ShouldStop())
    {
        train.PreIteration();
        train.Iteration();
        train.PostIteration();
        Console.WriteLine("Iteration #: " + train.IterationNumber
            + " Error: " + train.Error + " Gamma: " + train.Gamma);
    }

    sw.Stop();
    Console.WriteLine("SVM trained in: " + sw.ElapsedMilliseconds + " ms");

    return train.Error;
}
public void LearnTest5()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] positives = { 1, 1, 1, 1 };

    // Create a linear Support Vector Machine
    SupportVectorMachine machine = new SupportVectorMachine(inputs[0].Length);

    // Create the sequential minimal optimization teacher
    SequentialMinimalOptimization learn = new SequentialMinimalOptimization(machine, inputs, positives);
    learn.Complexity = 1;

    // Run the learning algorithm
    double error = learn.Run();
    Assert.AreEqual(0, error);

    int[] output = inputs.Apply(p => (int)machine.Compute(p));
    for (int i = 0; i < output.Length; i++)
    {
        bool sor = positives[i] >= 0;
        bool sou = output[i] >= 0;
        Assert.AreEqual(sor, sou);
    }
}
/// <summary>
///   Constructs a new Newton method algorithm for L2-regularized
///   Support Vector Classification problems in the primal form (-s 2).
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public LinearNewtonMethod(SupportVectorMachine machine, double[][] inputs, int[] outputs)
    : base(machine, inputs, outputs)
{
    if (!IsLinear)
        throw new ArgumentException("Only linear machines are supported.", "machine");

    int samples = inputs.Length;
    int parameters = machine.Inputs + 1;

    this.z = new double[samples];
    this.I = new int[samples];
    this.wa = new double[samples];

    this.g = new double[parameters];
    this.h = new double[parameters];
    this.biasIndex = machine.Inputs;

    tron = new TrustRegionNewtonMethod(parameters);
}
/// <summary>
///   Initializes a new instance of Platt's Probabilistic Output Calibration algorithm.
/// </summary>
///
/// <param name="machine">A Support Vector Machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The classification label for each data point. Values must be either -1 or +1.</param>
///
public ProbabilisticOutputCalibration(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new ArgumentException("The number of inputs and outputs does not match.", "outputs");

    for (int i = 0; i < outputs.Length; i++)
    {
        if (outputs[i] == 1)
            positives++;
        else if (outputs[i] == -1)
            negatives++;
        else
            throw new ArgumentOutOfRangeException("outputs",
                "One of the labels in the output vector is neither +1 nor -1.");
    }

    if (machine.Inputs > 0)
    {
        // This machine has a fixed input vector size
        for (int i = 0; i < inputs.Length; i++)
            if (inputs[i].Length != machine.Inputs)
                throw new ArgumentException(
                    "The size of the input vectors does not match the expected number of inputs of the machine.");
    }

    if (machine.Weights == null)
        throw new ArgumentException("The machine should have been trained by another method first.", "machine");

    // Machine
    this.machine = machine;

    // Learning data
    this.inputs = inputs;
    this.outputs = outputs;

    this.distances = new double[outputs.Length];
    this.targets = new double[outputs.Length];
}
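// A hedged usage sketch for the calibration constructor above. As its argument
// checks require, the machine must already have been trained (here by SMO, as
// in LearnTest5); calibration then fits Platt's sigmoid to the machine's
// decision values so its outputs can be interpreted probabilistically. Run()
// as the training entry point follows the other old-style teachers here; how
// the calibrated output is surfaced afterwards is an assumption.
public static void CalibrationUsageSketch(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // First pass: train the machine with a regular teacher
    var smo = new SequentialMinimalOptimization(machine, inputs, outputs);
    smo.Run();

    // Second pass: calibrate the trained machine's outputs
    var calibration = new ProbabilisticOutputCalibration(machine, inputs, outputs);
    double logLikelihoodError = calibration.Run();
}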
/// <summary>
///   Construct a trainer for an SVM network.
/// </summary>
///
/// <param name="method">The network to train.</param>
/// <param name="dataSet">The training data for this network.</param>
public SVMTrain(SupportVectorMachine method, IMLDataSet dataSet)
    : base(TrainingImplementationType.OnePass)
{
    _fold = 0;
    _network = method;
    Training = dataSet;
    _trainingDone = false;

    _problem = EncodeSVMProblem.Encode(dataSet, 0);
    _gamma = 1.0d / _network.InputCount;
    _c = 1.0d;
}
/// <summary>
///   Initializes a new instance of a Sequential Minimal Optimization (SMO) algorithm.
/// </summary>
///
/// <param name="machine">A Support Vector Machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public SequentialMinimalOptimization(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Initial argument checking
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    // Machine
    this.machine = machine;

    // Kernel (if applicable)
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;

    if (ksvm == null)
    {
        isLinear = true;
        Linear linear = new Linear();
        kernel = linear;
    }
    else
    {
        Linear linear = ksvm.Kernel as Linear;
        isLinear = linear != null;
        kernel = ksvm.Kernel;
    }

    // Learning data
    this.inputs = inputs;
    this.outputs = outputs;

    int samples = inputs.Length;
    int dimension = inputs[0].Length;

    // Lagrange multipliers
    this.alpha = new double[inputs.Length];

    if (isLinear) // Hyperplane weights
        this.weights = new double[dimension];

    // Error cache
    this.errors = new double[samples];

    // Kernel cache
    this.cacheSize = samples;

    // Index sets
    activeExamples = new HashSet<int>();
    nonBoundExamples = new HashSet<int>();
    atBoundsExamples = new HashSet<int>();
}
/// <summary>
///   Constructs a new coordinate descent algorithm for L1-loss and L2-loss SVM dual problems.
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public LinearDualCoordinateDescent(SupportVectorMachine machine, double[][] inputs, int[] outputs)
    : base(machine, inputs, outputs)
{
    int samples = inputs.Length;
    int dimension = inputs[0].Length;

    if (!IsLinear)
        throw new ArgumentException("Only linear machines are supported.", "machine");

    // Lagrange multipliers
    this.alpha = new double[samples];
    this.weights = new double[dimension];
}
public void TestSOM2()
{
    // Create the training set
    IMLDataSet training = new BasicMLDataSet(SOMInput2, null);

    // Create the neural network
    var network = new SOMNetwork(4, 4);
    var train = new BasicTrainSOM(network, 0.01, training, new NeighborhoodSingle())
    {
        ForceWinner = true
    };

    for (int iteration = 0; iteration <= 1000; iteration++)
        train.Iteration();

    IMLData data1 = new BasicMLData(SOMInput2[2]);
    IMLData data2 = new BasicMLData(SOMInput2[0]);
    IMLData data3 = new BasicMLData(SOMInput2[1]);
    IMLData data4 = new BasicMLData(SOMInput2[3]);

    int result1 = network.Classify(data1);
    int result2 = network.Classify(data2);
    int result3 = network.Classify(data3);
    int result4 = network.Classify(data4);

    Console.WriteLine("Winner in SOMInput2: " + network.Winner(new BasicMLData(SOMInput2[0])));
    Console.WriteLine("First: " + result1);
    Console.WriteLine("Second: " + result2);
    Console.WriteLine("Third: " + result3);
    Console.WriteLine("Fourth: " + result4);

    Assert.IsTrue(result1 != result2);

    train.TrainPattern(new BasicMLData(SOMInput2[2]));
    Console.WriteLine("After training pattern: " + network.Winner(new BasicMLData(SOMInput2[1])));

    var result = new SupportVectorMachine(4, SVMType.SupportVectorClassification, KernelType.Sigmoid);
    training = new BasicMLDataSet(SOMInput2, SOMInput2);

    SVMTrain trainsvm = new SVMTrain(result, training);
    trainsvm.Iteration(50);

    result1 = result.Classify(data1);
    result2 = result.Classify(data2);
    result3 = result.Classify(data3);
    result4 = result.Classify(data4);

    Console.WriteLine("SVM classification: EURUSD 1: " + result1
        + " GBPUSD: " + result2 + " EURCHF: " + result3 + " EURJPY: " + result4);
}
/// <summary>
///   Initializes a new instance of a Sequential Minimal Optimization (SMO) algorithm for regression.
/// </summary>
///
/// <param name="machine">A Support Vector Machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output value for each input point.</param>
///
public SequentialMinimalOptimizationRegression(SupportVectorMachine machine, double[][] inputs, double[] outputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new ArgumentException("The number of inputs and outputs does not match.", "outputs");

    if (machine.Inputs > 0)
    {
        // This machine has a fixed input vector size
        for (int i = 0; i < inputs.Length; i++)
            if (inputs[i].Length != machine.Inputs)
                throw new ArgumentException(
                    "The size of the input vectors does not match the expected number of inputs of the machine.");
    }

    // Machine
    this.machine = machine;

    // Kernel (if applicable)
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;
    this.kernel = (ksvm != null) ? ksvm.Kernel : new Linear();

    // Learning data
    this.inputs = inputs;
    this.outputs = outputs;
}
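// A hedged usage sketch for the regression constructor above. The data fit a
// simple line; the Gaussian kernel machine and the Complexity/Epsilon values
// are illustrative assumptions (the property names mirror the teacherSMOR
// configuration in Optimizer_ObjectiveFunction), with Run() as the assumed
// training entry point, as for the other old-style teachers here.
public static void SmoRegressionUsageSketch()
{
    double[][] inputs =
    {
        new double[] { 0 }, new double[] { 1 },
        new double[] { 2 }, new double[] { 3 }
    };
    double[] outputs = { 0.0, 2.0, 4.0, 6.0 };   // y = 2x

    var machine = new KernelSupportVectorMachine(new Gaussian(1.0), inputs: 1);
    var teacher = new SequentialMinimalOptimizationRegression(machine, inputs, outputs)
    {
        Complexity = 100.0,   // assumed property
        Epsilon = 0.01        // assumed property
    };

    double error = teacher.Run();
    double y = machine.Compute(new double[] { 1.5 });   // expect roughly 3
}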
// Train the SVM
private void LearnSVM_Click(object sender, RoutedEventArgs e)
{
    if (rowDataList.Count > 0)
    {
        // Create the SupportVectorMachine
        svm = new SupportVectorMachine(C, kernelMode, dataList = rowDataList.getDataList(POSITIVE_LABEL));

        // Update the status
        statusTextBlock.Text = Properties.Resources.TextStatusLearning;

        Task.Run(() =>
        {
            svm.learn(); // runs asynchronously

            this.Dispatcher.Invoke(() =>
            {
                showSVMGraphMenuItem.IsEnabled = true;

                // Update the status
                statusTextBlock.Text = Properties.Resources.TextStatusLearned;
            });

            // Once training has finished, try classifying with the result
            showSVMGraph();
        });
    }
}
/// <summary>
///   Constructs a new Newton method algorithm for L1-regularized
///   logistic regression (probabilistic linear vector machine).
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public ProbabilisticCoordinateDescent(SupportVectorMachine machine, double[][] inputs, int[] outputs)
    : base(machine, inputs, outputs)
{
    if (!IsLinear)
        throw new ArgumentException("Only linear machines are supported.", "machine");

    this.weights = new double[machine.Inputs + 1];
    this.biasIndex = machine.Inputs;
}
/// <summary>
///   Constructs a new Sequential Minimal Optimization (SMO) algorithm.
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public SequentialMinimalOptimization(SupportVectorMachine machine, double[][] inputs, int[] outputs)
    : base(machine, inputs, outputs)
{
    int samples = inputs.Length;
    int dimension = inputs[0].Length;

    // Lagrange multipliers
    this.alpha = new double[inputs.Length];

    if (IsLinear) // Hyperplane weights
        this.weights = new double[dimension];

    // Error cache
    this.errors = new double[samples];

    // Kernel cache
    this.cacheSize = samples;

    // Index sets
    activeExamples = new HashSet<int>();
    nonBoundExamples = new HashSet<int>();
    atBoundsExamples = new HashSet<int>();
}
/// <summary>
///   Initializes a new instance of a Sequential Minimal Optimization (SMO) algorithm.
/// </summary>
///
/// <param name="machine">A Support Vector Machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The classification label for each data point. Values must be either -1 or +1.</param>
///
public SequentialMinimalOptimization(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new ArgumentException("The number of inputs and outputs does not match.", "outputs");

    for (int i = 0; i < outputs.Length; i++)
    {
        if (outputs[i] != 1 && outputs[i] != -1)
            throw new ArgumentOutOfRangeException("outputs",
                "One of the labels in the output vector is neither +1 nor -1.");
    }

    if (machine.Inputs > 0)
    {
        // This machine has a fixed input vector size
        for (int i = 0; i < inputs.Length; i++)
            if (inputs[i].Length != machine.Inputs)
                throw new ArgumentException(
                    "The size of the input vectors does not match the expected number of inputs of the machine.");
    }

    // Machine
    this.machine = machine;

    // Kernel (if applicable)
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;

    if (ksvm == null)
    {
        isLinear = true;
        Linear linear = new Linear();
        kernel = linear;
    }
    else
    {
        Linear linear = ksvm.Kernel as Linear;
        isLinear = linear != null;
        kernel = ksvm.Kernel;
    }

    // Learning data
    this.inputs = inputs;
    this.outputs = outputs;

    int samples = inputs.Length;
    int dimension = inputs[0].Length;

    // Lagrange multipliers
    this.alpha = new double[inputs.Length];

    if (isLinear) // Hyperplane weights
        this.weights = new double[dimension];

    // Error cache
    this.errors = new double[samples];
}
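// A hedged usage sketch for the SMO constructor above, combining pieces that
// already appear in this section (KernelSupportVectorMachine and the
// Polynomial kernel from TransformTest; Run() and Complexity from LearnTest5).
// Only the example data are new.
public static void SmoUsageSketch()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };
    int[] xor = { -1, 1, 1, -1 };

    var machine = new KernelSupportVectorMachine(new Polynomial(2, 0), inputs[0].Length);
    var smo = new SequentialMinimalOptimization(machine, inputs, xor)
    {
        Complexity = 1.0
    };

    double error = smo.Run();
    int label = Math.Sign(machine.Compute(new double[] { -1, 1 }));   // expected: +1
}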