/// <summary>
/// Prints the currently selected invoice record. Handles the re-print
/// confirmation + forced re-login, stamps each printed sheet ("liên") as
/// original or "BẢN SAO" (copy), and persists the updated print count.
/// </summary>
private void btnin_Click(object sender, RoutedEventArgs e)
{
    // Nothing selected in the search grid -> nothing to print.
    if (GridSearch.XGReport.ActiveRecord == null)
    {
        return;
    }
    if (txtlien.Value != null)
    {
        // sl_in: how many times this record has already been printed.
        int sl_in = Convert.ToInt16(StartUp.DsTrans.Tables[0].DefaultView[0]["sl_in"]);
        DataRowView drvXReport = (GridSearch.XGReport.ActiveRecord as DataRecord).DataItem as DataRowView;
        string nd51 = drvXReport["nd51"].ToString();
        // Already printed at least once: ask for confirmation, then require a
        // successful re-login before allowing the re-print.
        // NOTE(review): nd51 == "1" appears to flag invoices whose print count
        // is tracked — confirm against the data dictionary.
        if (nd51 == "1" && sl_in > 0)
        {
            if (ExMessageBox.Show(390, StartUp.SysObj, "Hóa đơn đã được in, có muốn in lại hay không?", "", MessageBoxButton.YesNo, MessageBoxImage.Question, MessageBoxResult.No) == MessageBoxResult.No)
            {
                return;
            }
            FrmLogin login = new FrmLogin();
            login.ShowDialog();
            if (!login.IsLogined)
            {
                return;
            }
        }
        int so_lien = 1, so_lien_hd = 0, so_lien_xac_minh = 0;
        // Current sheet counter for this record — semantics come from
        // StartUp.GetSo_lien; presumably the next sheet number to print.
        int so_lien_hien_thoi = Convert.ToInt32(StartUp.GetSo_lien((DataRecord)GridSearch.XGReport.ActiveRecord, StartUp.DsTrans.Tables[0].DefaultView[0]["stt_rec"].ToString()));
        int.TryParse(DsPrint.Tables["TablePH"].DefaultView[0]["so_lien_hd"].ToString(), out so_lien_hd);
        int.TryParse(StartUp.DmctInfo["so_lien_xac_minh"].ToString(), out so_lien_xac_minh);
        // Beyond the invoice's official sheet count: everything printed from
        // here on is a duplicate.
        if (so_lien_hien_thoi > so_lien_hd)
        {
            DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"] = "BẢN SAO";
        }
        // Number of print passes: requested count (txtlien) divided by the
        // sheets produced per pass (grid cell "so_lien"), rounded up.
        int so_lan_in = Convert.ToInt16(Math.Ceiling(Convert.ToDouble(txtlien.Text) / Convert.ToDouble((GridSearch.XGReport.ActiveRecord as DataRecord).Cells["so_lien"].Value)));
        while (so_lien <= so_lan_in)
        {
            DsPrint.Tables["TablePH"].DefaultView[0]["so_lien"] = so_lien;
            if (so_lien_hien_thoi <= so_lien_hd)
            {
                // Stamp "BẢN SAO" unless this sheet number falls in the
                // not-yet-printed official range (hien_thoi < so_lien <= hd).
                if (so_lien <= so_lien_hien_thoi || so_lien > so_lien_hd)
                {
                    DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"] = "BẢN SAO";
                }
                else
                {
                    DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"] = "";
                }
            }
            InsertRowCT(nd51);
            // Second argument: true only while within the verification-sheet
            // limit AND the sheet carries no copy watermark.
            GridSearch.V_In(1, (so_lien_xac_minh >= so_lien && string.IsNullOrEmpty(DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"].ToString())));
            so_lien++;
        }
        // Persist the new print count (sl_in) and printed sheet number.
        if (nd51 == "1" && GridSearch.PrintSuccess)
        {
            string stt_rec = StartUp.DsTrans.Tables[0].DefaultView[0]["stt_rec"].ToString();
            StartUp.UpdateSl_in(stt_rec, drvXReport["id"].ToString(), txtlien.Text);
            StartUp.DsTrans.Tables[0].DefaultView[0]["sl_in"] = StartUp.GetSl_in(stt_rec);
        }
        ResetTableCt();
        StartUp.SetPhIn(this.DataContext as DataTable);
    }
    this.Close();
}
/// <summary>
/// Batch-prints every filtered record ("in liên tục" = continuous print).
/// Iterates print passes x records, re-filtering the print tables per record,
/// stamping sheets as original/"BẢN SAO", and updating print counts for
/// self-printed invoice templates (mau_tu_in == "1").
/// </summary>
private void btnin_lt_Click(object sender, RoutedEventArgs e)
{
    // Nothing selected -> nothing to print.
    if (GridSearch.XGReport.ActiveRecord == null)
    {
        return;
    }
    DataRowView drvXReport = (GridSearch.XGReport.ActiveRecord as DataRecord).DataItem as DataRowView;
    string mau_tu_in = drvXReport["mau_tu_in"].ToString();
    // Self-printed invoice templates cannot be batch-printed under QLHD.
    if (StartUp.IsQLHD && mau_tu_in == "1")
    {
        ExMessageBox.Show(395, StartUp.SysObj, "Có chứng từ thuộc mẫu hóa đơn tự in, không in liên tục được!", "", MessageBoxButton.OK, MessageBoxImage.Information);
        return;
    }
    // NOTE(review): when option M_IN_HOI_CK != "1" this handler silently does
    // nothing — confirm that is intentional (no "print without asking" path).
    if (StartUp.SysObj.GetOption("M_IN_HOI_CK").ToString() == "1")
    {
        if (ExMessageBox.Show(400, StartUp.SysObj, "Có chắc chắn in tất cả các chứng từ đã được lọc?", "", MessageBoxButton.YesNo, MessageBoxImage.Question, MessageBoxResult.No) == MessageBoxResult.Yes)
        {
            int so_lien;
            // Sheet counters per record, filled on the first pass and reused
            // (indexed [i - 1] because the row loop starts at 1).
            List<int> lstSo_lien = new List<int>();
            if (txtlien.Value != null)
            {
                // Remember the currently displayed row so the filters can be
                // restored after the batch finishes.
                int iRowTmp = FrmTT_SOCTHDA_HDDT.iRow;
                so_lien = 1;
                bool isPrint = false;
                int so_lien_xac_minh = 0;
                int.TryParse(StartUp.DmctInfo["so_lien_xac_minh"].ToString(), out so_lien_xac_minh);
                // Print passes = requested count / sheets per pass, rounded up.
                int so_lan_in = Convert.ToInt16(Math.Ceiling(Convert.ToDouble(txtlien.Text) / Convert.ToDouble((GridSearch.XGReport.ActiveRecord as DataRecord).Cells["so_lien"].Value)));
                while (so_lien <= so_lan_in)
                {
                    // NOTE(review): starts at i = 1, skipping row 0 — presumably
                    // row 0 is a placeholder/header row; confirm.
                    for (int i = 1; i < DsPrint.Tables[0].Rows.Count; i++)
                    {
                        string stt_rec = DsPrint.Tables[0].Rows[i]["stt_rec"].ToString();
                        // Narrow header/detail views to the current record.
                        DsPrint.Tables["TablePH"].DefaultView.RowFilter = "stt_rec= '" + stt_rec + "'";
                        DsPrint.Tables["TableCT"].DefaultView.RowFilter = "stt_rec= '" + stt_rec + "'";
                        DsPrint.Tables["TableCT"].DefaultView.Sort = "stt_rec0";
                        if (so_lien == 1)
                        {
                            lstSo_lien.Add(Convert.ToInt32(StartUp.GetSo_lien((DataRecord)GridSearch.XGReport.ActiveRecord, stt_rec)));
                        }
                        // Skip records with status "3" (meaning per the status
                        // dictionary — not visible here).
                        if (DsPrint.Tables[0].Rows[i]["status"].ToString() != "3")
                        {
                            int so_lien_hien_thoi = lstSo_lien[i - 1], so_lien_hd = 0;
                            int.TryParse(DsPrint.Tables["TablePH"].DefaultView[0]["so_lien_hd"].ToString(), out so_lien_hd);
                            // Same original/"BẢN SAO" stamping rule as the
                            // single-record print handler.
                            if (so_lien_hien_thoi > so_lien_hd)
                            {
                                DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"] = "BẢN SAO";
                            }
                            else
                            {
                                if (so_lien <= so_lien_hien_thoi || so_lien > so_lien_hd)
                                {
                                    DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"] = "BẢN SAO";
                                }
                                else
                                {
                                    DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"] = "";
                                }
                            }
                            DsPrint.Tables["TablePH"].DefaultView[0]["so_lien"] = so_lien;
                            int sl_in = Convert.ToInt16(StartUp.DsTrans.Tables[0].Rows[i]["sl_in"]);
                            // First already-printed record encountered: confirm
                            // the re-print and force a re-login, once per batch.
                            if (mau_tu_in == "1" && sl_in > 0 && !isPrint)
                            {
                                if (ExMessageBox.Show(405, StartUp.SysObj, "Hóa đơn đã được in, có muốn in lại hay không?", "", MessageBoxButton.YesNo, MessageBoxImage.Question, MessageBoxResult.No) == MessageBoxResult.No)
                                {
                                    return;
                                }
                                FrmLogin login = new FrmLogin();
                                login.ShowDialog();
                                if (!login.IsLogined)
                                {
                                    return;
                                }
                                // Already warned once; do not warn again for the
                                // remaining records in this batch.
                                isPrint = true;
                            }
                            InsertRowCT(mau_tu_in);
                            // Verification copy only within the limit and when
                            // the sheet carries no copy watermark.
                            GridSearch.V_In(1, (so_lien_xac_minh >= so_lien && string.IsNullOrEmpty(DsPrint.Tables["TablePH"].DefaultView[0]["ban_sao"].ToString())));
                            // Persist the print count once, on the first pass.
                            if (mau_tu_in == "1" && so_lien == 1 && GridSearch.PrintSuccess)
                            {
                                StartUp.UpdateSl_in(stt_rec, drvXReport["id"].ToString(), txtlien.Text);
                                StartUp.DsTrans.Tables[0].Rows[i]["sl_in"] = StartUp.GetSl_in(stt_rec);
                            }
                        }
                    }
                    so_lien++;
                }
                ResetTableCt();
                // Restore the views to the record that was displayed before
                // the batch started.
                DsPrint.Tables["TablePH"].DefaultView.RowFilter = "stt_rec= '" + DsPrint.Tables["TablePH"].Rows[iRowTmp]["stt_rec"].ToString() + "'";
                DsPrint.Tables["TableCT"].DefaultView.RowFilter = "stt_rec= '" + DsPrint.Tables["TablePH"].Rows[iRowTmp]["stt_rec"].ToString() + "'";
                DsPrint.Tables["TableCT"].DefaultView.Sort = "stt_rec0";
                StartUp.SetPhIn(this.DataContext as DataTable);
            }
            this.Close();
        }
    }
}
// Verifies the classic (non-generic) GridSearch constructor + Fitting-delegate
// API: searching SVM complexity over the XOR problem. A linear machine cannot
// separate XOR, so the best achievable error asserted below is 0.5.
public void GridsearchConstructorTest2()
{
    // Seed the framework RNG so results are reproducible.
    Accord.Math.Tools.SetupGenerator(0);

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1, 1 },
        new double[] { 1, -1 },
        new double[] { 1, 1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 1000000, 0.50 } ),
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<SupportVectorMachine>(ranges);

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        SupportVectorMachine svm = new SupportVectorMachine(2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return svm; // Return the current model
    };

    {
        // Declare some out variables to pass to the grid search algorithm
        GridSearchParameterCollection bestParameters;
        double minError;

        // Compute the grid search to find the best Support Vector Machine
        SupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

        // XOR is not linearly separable, so the best error is exactly 0.5.
        Assert.AreEqual(minError, 0.5);
        Assert.IsNotNull(bestModel);
        Assert.IsNotNull(bestParameters);
        Assert.AreEqual(bestParameters.Count, 1);
    }

    {
        // Compute the grid search to find the best Support Vector Machine
        var result = gridsearch.Compute();

        // Same 0.5 floor as above; one model/error per candidate in the range.
        Assert.AreEqual(result.Error, 0.5);
        Assert.IsNotNull(result.Model);
        Assert.AreEqual(5, result.Errors.Length);
        Assert.AreEqual(5, result.Models.Length);
    }
}
/// <summary>
/// Runs a breadth-first search between the configured start and end cells
/// and kicks off the coroutine that animates the resulting path.
/// </summary>
public void BFS()
{
    var origin = _startCell.GetPosition();
    var target = _endCell.GetPosition();
    StartCoroutine(ShowPath(GridSearch.BreadthFirstSearch(_grid, origin, target)));
}
/// <summary>
/// Creates a discrete-model scorer for the given phylogenetic tree; all
/// initialization is delegated to the base scorer constructor.
/// </summary>
public ModelScorerDiscrete(PhyloTree tree, GridSearch optimizer) : base(tree, optimizer)
{
}
/// <summary>
/// Initializes the scorer with the tree to evaluate and the grid-search
/// optimizer used to fit model parameters.
/// </summary>
protected ModelScorer(PhyloTree tree, GridSearch optimizer)
{
    PhyloTree = tree;
    GridSearch = optimizer;
}
// Verifies the GridSearch.Create factory API finds the documented best SVM
// parameters for XOR. The doc_create region is extracted into the user docs,
// so the code inside it is kept verbatim.
public void create_test()
{
    #region doc_create
    // Ensure results are reproducible
    Accord.Math.Random.Generator.Seed = 0;

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1, 1 },
        new double[] { 1, -1 },
        new double[] { 1, 1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = GridSearch<double[], int>.Create(

        // Here we can specify the range of the parameters to be included in the search
        ranges: new GridSearchRange[]
        {
            new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
            new GridSearchRange("degree", new double[] { 1, 10, 2, 3, 4, 5 }),
            new GridSearchRange("constant", new double[] { 0, 1, 2 })
        },

        // Indicate how learning algorithms for the models should be created
        learner: (p) => new SequentialMinimalOptimization<Polynomial>
        {
            Complexity = p["complexity"],
            Kernel = new Polynomial((int)p["degree"].Value, p["constant"])
        },

        // Define how the model should be learned, if needed
        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

        // Define how the performance of the models should be measured
        loss: (actual, expected, m) => new ZeroOneLoss(expected).Loss(actual)
    );

    // If needed, control the degree of CPU parallelization
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Search for the best model parameters
    var result = gridsearch.Learn(inputs, xor);

    // Get the best SVM generated during the search
    SupportVectorMachine<Polynomial> svm = result.BestModel;

    // Get an estimate for its error:
    double bestError = result.BestModelError;

    // Get the best values for its parameters:
    double bestC = result.BestParameters["complexity"].Value;
    double bestDegree = result.BestParameters["degree"].Value;
    double bestConstant = result.BestParameters["constant"].Value;
    #endregion

    Assert.IsNotNull(svm);
    Assert.AreEqual(1e-8, bestC, 1e-10);
    Assert.AreEqual(0, bestError, 1e-8);
    Assert.AreEqual(1, bestDegree, 1e-8);
    Assert.AreEqual(1, bestConstant, 1e-8);
}
// Same scenario as the legacy test above but seeded via the newer
// Accord.Math.Random.Generator API. A linear machine cannot separate XOR,
// so the best achievable error asserted below is 0.5.
public void GridsearchConstructorTest2()
{
    // Seed the framework RNG so results are reproducible.
    Accord.Math.Random.Generator.Seed = 0;

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1, 1 },
        new double[] { 1, -1 },
        new double[] { 1, 1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 1000000, 0.50 }),
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<SupportVectorMachine>(ranges);
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        SupportVectorMachine svm = new SupportVectorMachine(2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return(svm); // Return the current model
    };

    {
        // Declare some out variables to pass to the grid search algorithm
        GridSearchParameterCollection bestParameters;
        double minError;

        // Compute the grid search to find the best Support Vector Machine
        SupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

        // XOR is not linearly separable, so the best error is exactly 0.5.
        Assert.AreEqual(minError, 0.5);
        Assert.IsNotNull(bestModel);
        Assert.IsNotNull(bestParameters);
        Assert.AreEqual(bestParameters.Count, 1);
    }

    {
        // Compute the grid search to find the best Support Vector Machine
        var result = gridsearch.Compute();

        // Same 0.5 floor as above; one model/error per candidate in the range.
        Assert.AreEqual(result.Error, 0.5);
        Assert.IsNotNull(result.Model);
        Assert.AreEqual(5, result.Errors.Length);
        Assert.AreEqual(5, result.Models.Length);
    }
}
/// <summary>
/// Priority-first traversal of a grid graph starting at (x, y). The comparer
/// decides which frontier edge is expanded next (so the same routine serves
/// several search strategies). Visited cells are appended to
/// <c>search.Order</c>, and — when <c>search.Parent</c> is non-null — each
/// cell's parent is recorded (the root is its own parent).
/// </summary>
internal static void Search(GridGraph graph, GridSearch search, int x, int y, float[,] weights, IComparer<GridEdge> comparer)
{
    var visited = new bool[graph.Width, graph.Height];
    visited[x, y] = true;
    search.Order.Add(new Vector2i(x, y));
    if (search.Parent != null)
    {
        search.Parent[x, y] = new Vector2i(x, y);
    }

    var frontier = new PriorityQueue<GridEdge>(comparer);
    var scratch = new List<GridEdge>(8); // reused buffer for outgoing edges

    // Seed the frontier with the start cell's outgoing edges.
    graph.GetEdges(x, y, scratch, weights);
    foreach (GridEdge seed in scratch)
    {
        frontier.Push(seed);
    }
    scratch.Clear();

    while (frontier.Count != 0)
    {
        GridEdge best = frontier.Pop();
        Vector2i cell = best.To;

        // Stale frontier entry: the cell was reached via a better edge already.
        if (visited[cell.x, cell.y])
        {
            continue;
        }

        search.Order.Add(cell);
        visited[cell.x, cell.y] = true;
        if (search.Parent != null)
        {
            search.Parent[cell.x, cell.y] = best.From;
        }

        // No outgoing edges from this cell: nothing to expand.
        if (graph.Edges[cell.x, cell.y] == 0)
        {
            continue;
        }

        graph.GetEdges(cell.x, cell.y, scratch, weights);
        foreach (GridEdge candidate in scratch)
        {
            if (!visited[candidate.To.x, candidate.To.y])
            {
                frontier.Push(candidate);
            }
        }
        scratch.Clear();
    }
}
// Verifies the strongly-typed GridSearch learner (object-initializer API)
// finds the documented best SVM parameters for XOR. The doc_learn region is
// extracted into the user docs, so the code inside it is kept verbatim.
public void learn_test()
{
    #region doc_learn
    // Ensure results are reproducible
    Accord.Math.Random.Generator.Seed = 0;

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1, 1 },
        new double[] { 1, -1 },
        new double[] { 1, 1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<SupportVectorMachine<Polynomial>, double[], int>()
    {
        // Here we can specify the range of the parameters to be included in the search
        ParameterRanges = new GridSearchRangeCollection()
        {
            new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
            new GridSearchRange("degree", new double[] { 1, 10, 2, 3, 4, 5 }),
            new GridSearchRange("constant", new double[] { 0, 1, 2 })
        },

        // Indicate how learning algorithms for the models should be created
        Learner = (p) => new SequentialMinimalOptimization<Polynomial>
        {
            Complexity = p["complexity"],
            Kernel = new Polynomial((int)p["degree"], p["constant"])
        },

        // Define how the performance of the models should be measured
        Loss = (actual, expected, m) => new ZeroOneLoss(expected).Loss(actual)
    };

    // If needed, control the degree of CPU parallelization
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Search for the best model parameters
    var result = gridsearch.Learn(inputs, xor);

    // Get the best SVM found during the parameter search
    SupportVectorMachine<Polynomial> svm = result.BestModel;

    // Get an estimate for its error:
    double bestError = result.BestModelError;

    // Get the best values found for the model parameters:
    double bestC = result.BestParameters["complexity"].Value;
    double bestDegree = result.BestParameters["degree"].Value;
    double bestConstant = result.BestParameters["constant"].Value;
    #endregion

    Assert.IsNotNull(svm);
    Assert.AreEqual(1e-8, bestC, 1e-10);
    Assert.AreEqual(0, bestError, 1e-8);
    Assert.AreEqual(1, bestDegree, 1e-8);
    Assert.AreEqual(1, bestConstant, 1e-8);
    Assert.AreEqual(1, svm.Kernel.Degree);
    Assert.AreEqual(1, svm.Kernel.Constant);
}
// Verifies GridSearch.CrossValidate over C4.5 decision-tree hyperparameters
// (Join, MaxHeight) on the Parkinsons dataset. The doc_learn_tree_cv region
// is extracted into the user docs, so the code inside it is kept verbatim.
public void cross_validation_decision_tree()
{
    #region doc_learn_tree_cv
    // Ensure results are reproducible
    Accord.Math.Random.Generator.Seed = 0;

    // This is a sample code showing how to use Grid-Search in combination with
    // Cross-Validation to assess the performance of Decision Trees with C4.5.

    var parkinsons = new Parkinsons();
    double[][] input = parkinsons.Features;
    int[] output = parkinsons.ClassLabels;

    // Create a new Grid-Search with Cross-Validation algorithm. Even though the
    // generic, strongly-typed approach used across the framework is most of the
    // time easier to handle, combining those both methods in a single call can be
    // difficult. For this reason, the framework offers a specialized method for
    // combining those two algorithms:
    var gscv = GridSearch.CrossValidate(

        // Here we can specify the range of the parameters to be included in the search
        ranges: new
        {
            Join = GridSearch.Range(fromInclusive: 1, toExclusive: 20),
            MaxHeight = GridSearch.Range(fromInclusive: 1, toExclusive: 20),
        },

        // Indicate how learning algorithms for the models should be created
        learner: (p, ss) => new C45Learning
        {
            // Here, we can use the parameters we have specified above:
            Join = p.Join,
            MaxHeight = p.MaxHeight,
        },

        // Define how the model should be learned, if needed
        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

        // Define how the performance of the models should be measured
        loss: (actual, expected, r) => new ZeroOneLoss(expected).Loss(actual),

        folds: 3, // use k = 3 in k-fold cross validation

        x: input, y: output // so the compiler can infer generic types
    );

    // If needed, control the parallelization degree
    gscv.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Search for the best decision tree
    var result = gscv.Learn(input, output);

    // Get the best cross-validation result:
    var crossValidation = result.BestModel;

    // Get an estimate of its error:
    double bestAverageError = result.BestModelError;

    double trainError = result.BestModel.Training.Mean;
    double trainErrorVar = result.BestModel.Training.Variance;
    double valError = result.BestModel.Validation.Mean;
    double valErrorVar = result.BestModel.Validation.Variance;

    // Get the best values for the parameters:
    int bestJoin = result.BestParameters.Join;
    int bestHeight = result.BestParameters.MaxHeight;

    // Use the best parameter values to create the final
    // model using all the training and validation data:
    var bestTeacher = new C45Learning
    {
        Join = bestJoin,
        MaxHeight = bestHeight,
    };

    // Use the best parameters to create the final tree model:
    DecisionTree finalTree = bestTeacher.Learn(input, output);
    #endregion

    int height = finalTree.GetHeight();
    Assert.AreEqual(5, height);

    Assert.AreEqual(22, result.BestModel.NumberOfInputs);
    Assert.AreEqual(2, result.BestModel.NumberOfOutputs);
    Assert.AreEqual(195, result.BestModel.NumberOfSamples);
    Assert.AreEqual(65, result.BestModel.AverageNumberOfSamples);
    Assert.AreEqual(bestAverageError, valError);
    Assert.AreEqual(5, bestJoin, 1e-10);
    Assert.AreEqual(0.1076923076923077, bestAverageError, 1e-8);
    Assert.AreEqual(5, bestHeight, 1e-8);
}
/// <summary>
/// Builds the auto-complete filter terms (via BeforeFilter plus an optional
/// LIKE term on the code column), refuses to run with no criteria at all,
/// then executes the query (paged or unpaged) and binds it to the grid.
/// </summary>
protected virtual void Filter(object sender, EventArgs e)
{
    AutoCompleteWheretermFormater = null;
    BeforeFilter();

    // Append a LIKE term on the code column when the code box is filled,
    // extending whatever terms BeforeFilter may have produced.
    if (tbxCode.Text != "")
    {
        var codeTerm = WhereTerm.Default(tbxCode.Text, CodeColumn, EnumSqlOperator.Like);
        if (AutoCompleteWheretermFormater == null)
        {
            AutoCompleteWheretermFormater = new IListParameter[] { codeTerm };
        }
        else
        {
            var extended = AutoCompleteWheretermFormater;
            Array.Resize(ref extended, extended.Length + 1);
            extended[extended.Length - 1] = codeTerm;
            AutoCompleteWheretermFormater = extended;
        }
    }

    // Scan the parameter panel for at least one filled-in search control.
    var hasCriteria = false;
    foreach (Control o in MainContainer.Panel1.Controls)
    {
        if (o is TextBox && o.Text != "")
        {
            hasCriteria = true;
            break;
        }
        if (o is dTextBox && o.Text != "")
        {
            hasCriteria = true;
            break;
        }
        if (o is dTextBoxNumber && o.Text != "")
        {
            hasCriteria = true;
            break;
        }
        if (o is dCalendar && ((dCalendar)o).EditValue != null)
        {
            hasCriteria = true;
            o.Focus(); // keep focus on the calendar that triggered the search
            break;
        }
        if (o is dLookupC && ((dLookupC)o).Value != null)
        {
            hasCriteria = true;
            break;
        }
        if (o is ComboBox && ((ComboBox)o).SelectedValue != null)
        {
            hasCriteria = true;
            break;
        }
    }

    if (!hasCriteria)
    {
        MessageBox.Show(@"Masukkan parameter pencarian", Resources.title_information, MessageBoxButtons.OK);
        return;
    }

    if (ByPaging)
    {
        // Paged mode: reset to page one with the current sort settings.
        PageLimit = 10;
        PagingForm = new Paging { Direction = SortDirection, SortColumn = SortColumn };
        CurrentFilter = GotoFirstPage<TModel>(sender, e);
    }
    else
    {
        CurrentFilter = DataManager.Get<TModel>(AutoCompleteWheretermFormater);
    }

    GridSearch.DataSource = CurrentFilter;
    SearchView.RefreshData();
    NavigatorPagingState = PagingState;
    GridSearch.Focus();
}
/// <summary>
/// Trains a multiclass Gaussian-kernel SVM, grid-searching Complexity and
/// Gamma over powers of two (2^-12 .. 2^12) with 10-fold cross-validation,
/// then refits the best parameters via CreateModel and measures the error
/// over _originalInputsList.
/// </summary>
/// <returns>Tuple of (model, error on _originalInputsList, best Gamma, best Complexity).</returns>
private Tuple<MulticlassSupportVectorMachine<Gaussian>, double, double, double> TrainingPaper(List<double[]> inputsList, List<int> outputsList)
{
    var gridsearch = GridSearch<double[], int>.CrossValidate(

        // Here we can specify the range of the parameters to be included in the search
        ranges: new
        {
            Complexity = GridSearch.Values(Math.Pow(2, -12), Math.Pow(2, -11), Math.Pow(2, -10), Math.Pow(2, -8), Math.Pow(2, -6), Math.Pow(2, -4), Math.Pow(2, -2), Math.Pow(2, 0), Math.Pow(2, 2), Math.Pow(2, 4), Math.Pow(2, 6), Math.Pow(2, 8), Math.Pow(2, 10), Math.Pow(2, 11), Math.Pow(2, 12)),
            Gamma = GridSearch.Values(Math.Pow(2, -12), Math.Pow(2, -11), Math.Pow(2, -10), Math.Pow(2, -8), Math.Pow(2, -6), Math.Pow(2, -4), Math.Pow(2, -2), Math.Pow(2, 0), Math.Pow(2, 2), Math.Pow(2, 4), Math.Pow(2, 6), Math.Pow(2, 8), Math.Pow(2, 10), Math.Pow(2, 11), Math.Pow(2, 12))
        },

        // Indicate how learning algorithms for the models should be created
        learner: (p, ss) => new MulticlassSupportVectorLearning<Gaussian>()
        {
            // Configure the learning algorithm to use SMO to train the
            // underlying SVMs in each of the binary class subproblems.
            Learner = (param) => new SequentialMinimalOptimization<Gaussian>()
            {
                // The framework's heuristics (UseComplexityHeuristic /
                // UseKernelEstimation) could estimate starting values, but we
                // search the explicit grid above instead:
                Complexity = p.Complexity,
                Kernel = Gaussian.FromGamma(p.Gamma)
            }
        },

        // Define how the model should be learned, if needed
        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

        // Define how the performance of the models should be measured
        loss: (actual, expected, m) => new HammingLoss(expected).Loss(actual),

        folds: 10
    );

    gridsearch.ParallelOptions.MaxDegreeOfParallelism = _paralelism;

    Console.WriteLine("y nos ponemos a aprender");

    // Search for the best model parameters
    var result = gridsearch.Learn(inputsList.ToArray(), outputsList.ToArray());
    Console.WriteLine("Error modelo: " + result.BestModelError);

    // Refit a final model on the supplied data using the best parameters.
    var model = CreateModel(inputsList, outputsList, result.BestParameters.Complexity, result.BestParameters.Gamma);

    // Misclassification rate over the held-out original samples.
    double error = 0;
    Console.WriteLine("Largo: " + _originalInputsList.Count);
    foreach (var input in _originalInputsList)
    {
        if (!model.Decide(input.Item1).Equals(input.Item2))
        {
            error++;
        }
    }
    error = error / (_originalInputsList.Count);
    Console.WriteLine("Error real: " + error);

    return(new Tuple<MulticlassSupportVectorMachine<Gaussian>, double, double, double>(model, error, result.BestParameters.Gamma.Value, result.BestParameters.Complexity.Value));
}
/// <summary>
/// Filters the available-tickets grid by the selected price range and rebinds
/// it. The five near-identical copy-pasted query blocks of the original are
/// collapsed into one helper; ADO.NET objects are now disposed (the original
/// leaked SqlConnection/SqlCommand/SqlDataAdapter instances).
/// </summary>
protected void DropDownTickets_SelectedIndexChanged(object sender, EventArgs e)
{
    // Map each dropdown choice to the SQL predicate for its price range.
    // Unknown/unhandled values (including the $40-$41 gap in the original
    // ranges) leave the grid untouched, exactly as before.
    switch (DropDownTickets.SelectedValue)
    {
        case "$1.00-$10.00":
            BindTicketsByPrice("a.price between 1 and 10");
            break;
        case "$11.00-$20.00":
            BindTicketsByPrice("a.price between 11 and 20");
            break;
        case "$21.00-$30.00":
            BindTicketsByPrice("a.price between 21 and 30");
            break;
        case "$31.00-$40.00":
            BindTicketsByPrice("a.price between 31 and 40");
            break;
        case "$41.00-$50.00":
            BindTicketsByPrice("a.price between 41 and 50");
            break;
        case "$50.00 or Above":
            BindTicketsByPrice("a.price > 50");
            break;
    }
}

// Runs the ticket query restricted by priceFilter — a trusted, hard-coded
// predicate supplied only by the switch above, never user input — and binds
// the result set to GridSearch, updating the error label.
private void BindTicketsByPrice(string priceFilter)
{
    string connectionString = WebConfigurationManager.ConnectionStrings["CapstoneSQLConn"].ConnectionString;
    string selectSql = "select e.name as 'Event', c.name as 'Artist', e.startDate as 'Date', e.startTime as 'Time', v.name as 'Venue', a.price as 'Ticket Price', s.name as 'Seat Section' from [Event] as e, [Venue] as v, [Section] as s, [AvailableTickets] as a, [Artist] as c where e.venueID = v.venueID and e.eventID = a.eventID and a.sectionID = s.sectionID and e.artistID = c.artistID and " + priceFilter + " order by a.price ASC";

    DataSet ds = new DataSet();
    // Dispose connection/command/adapter deterministically; Fill opens and
    // closes the connection itself.
    using (SqlConnection connection = new SqlConnection(connectionString))
    using (SqlCommand cmd = new SqlCommand(selectSql, connection))
    using (SqlDataAdapter adapter = new SqlDataAdapter(cmd))
    {
        adapter.Fill(ds);
    }

    GridSearch.DataSource = ds;
    GridSearch.DataBind();

    if (ds.Tables[0].Rows.Count == 0)
    {
        lblerror.Text = "*Your Search Returned Zero Result!!, Please Try Searching Again!!!";
    }
    else
    {
        lblerror.Text = "";
    }
}
// Verifies the anonymous-type (strongly-typed) GridSearch.Create overload,
// searching kernel type, complexity, and tolerance on a replicated XOR set.
// The doc_learn_strongly_typed region is extracted into the user docs, so the
// code inside it is kept verbatim.
public void learn_test_strongly_typed()
{
    #region doc_learn_strongly_typed
    // Ensure results are reproducible
    Accord.Math.Random.Generator.Seed = 0;

    // This is a sample code showing how to use Grid-Search in combination with
    // Cross-Validation to assess the performance of Support Vector Machines.

    // Consider the example binary data. We will be trying to learn a XOR
    // problem and see how well does SVMs perform on this data.
    double[][] inputs =
    {
        new double[] { -1, -1 }, new double[] { 1, -1 },
        new double[] { -1, 1 }, new double[] { 1, 1 },
        new double[] { -1, -1 }, new double[] { 1, -1 },
        new double[] { -1, 1 }, new double[] { 1, 1 },
        new double[] { -1, -1 }, new double[] { 1, -1 },
        new double[] { -1, 1 }, new double[] { 1, 1 },
        new double[] { -1, -1 }, new double[] { 1, -1 },
        new double[] { -1, 1 }, new double[] { 1, 1 },
    };

    int[] xor = // result of xor for the sample input data
    {
        -1, 1, 1, -1,
        -1, 1, 1, -1,
        -1, 1, 1, -1,
        -1, 1, 1, -1,
    };

    // Create a new Grid-Search with Cross-Validation algorithm. Even though the
    // generic, strongly-typed approach used across the framework is most of the
    // time easier to handle, meta-algorithms such as grid-search can be a bit hard
    // to setup. For this reason, the framework offers a specialized method for it:
    var gridsearch = GridSearch<double[], int>.Create(

        // Here we can specify the range of the parameters to be included in the search
        ranges: new
        {
            Kernel = GridSearch.Range(new IKernel[] { new Linear(), new ChiSquare(), new Gaussian(), new Sigmoid() }),
            Complexity = GridSearch.Range(new[] { 0.00000001, 5.20, 0.30, 0.50 }),
            Tolerance = GridSearch.Range(Vector.Range(1e-10, 1.0, stepSize: 0.05))
        },

        // Indicate how learning algorithms for the models should be created
        learner: (p) => new SequentialMinimalOptimization<IKernel>
        {
            Complexity = p.Complexity,
            Kernel = p.Kernel.Value,
            Tolerance = p.Tolerance
        },

        // Define how the model should be learned, if needed
        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

        // Define how the performance of the models should be measured
        loss: (actual, expected, m) => new ZeroOneLoss(expected).Loss(actual)
    );

    // If needed, control the degree of CPU parallelization
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Search for the best model parameters
    var result = gridsearch.Learn(inputs, xor);

    // Get the best SVM:
    SupportVectorMachine<IKernel> svm = result.BestModel;

    // Estimate its error:
    double bestError = result.BestModelError;

    // Get the best values for the parameters:
    double bestC = result.BestParameters.Complexity;
    double bestTolerance = result.BestParameters.Tolerance;
    IKernel bestKernel = result.BestParameters.Kernel.Value;
    #endregion

    Assert.IsNotNull(svm);
    Assert.AreEqual(1e-8, bestC, 1e-10);
    Assert.AreEqual(0, bestError, 1e-8);
    Assert.AreEqual(0, bestTolerance, 1e-8);
    Assert.AreEqual(typeof(Gaussian), bestKernel.GetType());
}
/// <summary>
/// Pages the search grid: moves to the requested page index and rebinds
/// the data source.
/// </summary>
protected void GridSearch_PageIndexChanging(object sender, GridViewPageEventArgs e)
{
    GridSearch.PageIndex = e.NewPageIndex;
    GridSearch.DataBind();
}
public void cross_validation_test()
{
    // Verifies GridSearch.CrossValidate: grid-searches SVM polynomial-kernel
    // parameters with 3-fold cross-validation on the XOR problem and checks
    // the best result found with a fixed random seed.
    #region doc_learn_cv
    // Ensure results are reproducible
    Accord.Math.Random.Generator.Seed = 0;

    // This is a sample code showing how to use Grid-Search in combination with
    // Cross-Validation to assess the performance of Support Vector Machines.

    // Consider the example binary data. We will be trying to learn a XOR
    // problem and see how well does SVMs perform on this data.

    double[][] inputs =
    {
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
    };

    int[] xor = // result of xor for the sample input data
    {
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
    };

    // Create a new Grid-Search with Cross-Validation algorithm. Even though the
    // generic, strongly-typed approach used across the framework is most of the
    // time easier to handle, combining both methods in a single call can be
    // difficult. For this reason, the framework offers a specialized method for
    // combining those two algorithms:
    var gscv = GridSearch<double[], int>.CrossValidate(

        // Here we can specify the range of the parameters to be included in the search
        ranges: new
        {
            Complexity = GridSearch.Range(new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
            Degree = GridSearch.Range(new int[] { 1, 10, 2, 3, 4, 5 }),
            Constant = GridSearch.Range(new double[] { 0, 1, 2 }),
        },

        // Indicate how learning algorithms for the models should be created
        learner: (p, ss) => new SequentialMinimalOptimization<Polynomial>
        {
            // Here, we can use the parameters we have specified above:
            Complexity = p.Complexity,
            Kernel = new Polynomial(p.Degree, p.Constant)
        },

        // Define how the model should be learned, if needed
        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

        // Define how the performance of the models should be measured
        loss: (actual, expected, r) => new ZeroOneLoss(expected).Loss(actual),

        folds: 3 // use k = 3 in k-fold cross validation
    );

    // If needed, control the parallelization degree
    gscv.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Search for the best vector machine
    var result = gscv.Learn(inputs, xor);

    // Get the best cross-validation result:
    var crossValidation = result.BestModel;

    // Estimate its error:
    double bestError = result.BestModelError;
    double trainError = result.BestModel.Training.Mean;
    double trainErrorVar = result.BestModel.Training.Variance;
    double valError = result.BestModel.Validation.Mean;
    double valErrorVar = result.BestModel.Validation.Variance;

    // Get the best values for the parameters:
    double bestC = result.BestParameters.Complexity;
    double bestDegree = result.BestParameters.Degree;
    double bestConstant = result.BestParameters.Constant;
    #endregion

    Assert.AreEqual(2, result.BestModel.NumberOfInputs);
    Assert.AreEqual(1, result.BestModel.NumberOfOutputs);
    Assert.AreEqual(16, result.BestModel.NumberOfSamples);
    Assert.AreEqual(5.333333333333333, result.BestModel.AverageNumberOfSamples);
    Assert.AreEqual(1e-8, bestC, 1e-10);
    Assert.AreEqual(0, bestError, 1e-8);
    Assert.AreEqual(10, bestDegree, 1e-8);
    Assert.AreEqual(0, bestConstant, 1e-8);
}
public void GridsearchConstructorTest()
{
    // Exercises the classic (pre-generic) GridSearch API: a Fitting delegate
    // builds and trains a KernelSupportVectorMachine for each parameter
    // combination, and Compute returns the best model found.
    Accord.Math.Random.Generator.Seed = 0;

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
        new GridSearchRange("degree", new double[] { 1, 10, 2, 3, 4, 5 }),
        new GridSearchRange("constant", new double[] { 0, 1, 2 })
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

#if DEBUG
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        int degree = (int)parameters["degree"].Value;
        double constant = parameters["constant"].Value;
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        Polynomial kernel = new Polynomial(degree, constant);
        KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return(ksvm); // Return the current model
    };

    // Declare some out variables to pass to the grid search algorithm
    GridSearchParameterCollection bestParameters;
    double minError;

    // Compute the grid search to find the best Support Vector Machine
    KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

    // Expected best parameter combination under the fixed seed.
    // NOTE(review): degree 1 with complexity 1e-8 is the expected winner here;
    // confirm this matches the intended "linear kernel can't solve XOR" story.
    Assert.AreEqual(1, bestParameters["degree"].Value);
    Assert.AreEqual(1, bestParameters["constant"].Value);
    Assert.AreEqual(1e-8, bestParameters["complexity"].Value);

    // The minimum error should be zero because the problem is well-known.
    Assert.AreEqual(minError, 0.0);

    Assert.IsNotNull(bestModel);
    Assert.IsNotNull(bestParameters);
    Assert.AreEqual(bestParameters.Count, 3);
}
public void internals_test()
{
    // White-box test of GridSearch internals: uses string-valued parameter
    // ranges and a Mapper/MapperLearning test double to check how results,
    // errors, exceptions, models and parameter combinations are enumerated
    // and indexed (combination index i maps to range indices via
    // ((i / c) / b) % a, (i / c) % b and i % c).
    Accord.Math.Random.Generator.Seed = 0;

    string[] inputs = { "input 1", "input 2", "input 3", "input 4", };
    string[] outputs = { "output 1", "output 2", "output 3", };
    double[] weights = { 1.0, 2.0, 3.0 };

    // Collects every model whose loss was evaluated via the normal path.
    var lossModels = new List<Mapper>();

    var ranges = new
    {
        Parameter1 = GridSearch.Range("parameter 11", "parameter 12"),
        Parameter2 = GridSearch.Range("parameter 21", "parameter 22", "parameter 23", "parameter 24"),
        Parameter3 = GridSearch.Range("parameter 31")
    };

    var result = GridSearch.Create(
        ranges: ranges,
        learner: (p) => new MapperLearning
        {
            Parameter1 = p.Parameter1,
            Parameter2 = p.Parameter2,
            Parameter3 = p.Parameter3,
        },
        fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
        loss: (actual, expected, m) =>
        {
            // This specific combination is forced to be the winner (-42);
            // it is deliberately NOT added to lossModels.
            if (m.Parameter1 == "parameter 12" && m.Parameter2 == "parameter 21" && m.Parameter3 == "parameter 31")
            {
                return(-42);
            }

            // loss may be called from several threads, hence the lock.
            lock (lossModels)
            {
                lossModels.Add(m);
            }

            // Encode the parameter triple into a distinct loss value so each
            // combination's error is recognizable in result.Errors below.
            return(Math.Abs(int.Parse(m.Parameter1.Replace("parameter ", ""))
                + 100 * int.Parse(m.Parameter2.Replace("parameter ", ""))
                + 10000 * int.Parse(m.Parameter3.Replace("parameter ", ""))));
        },
        x: inputs,
        y: outputs,
        weights: weights
    );

    // The forced winner must be reported as the best model.
    Mapper bestModel = result.BestModel;
    Assert.AreEqual("parameter 12", bestModel.Parameter1);
    Assert.AreEqual("parameter 21", bestModel.Parameter2);
    Assert.AreEqual("parameter 31", bestModel.Parameter3);
    Assert.AreEqual(inputs, bestModel.Inputs);
    Assert.AreEqual(outputs, bestModel.Outputs);
    Assert.AreEqual(weights, bestModel.Weights);

    Assert.AreEqual(-42, result.BestModelError);
    Assert.AreEqual(4, result.BestModelIndex);

    var bestParameters = result.BestParameters;
    Assert.AreNotSame(ranges, bestParameters);
    Assert.AreEqual(1, bestParameters.Parameter1.Index);
    Assert.AreEqual(0, bestParameters.Parameter2.Index);
    Assert.AreEqual(0, bestParameters.Parameter3.Index);
    Assert.AreEqual("parameter 12", bestParameters.Parameter1.Value);
    Assert.AreEqual("parameter 21", bestParameters.Parameter2.Value);
    Assert.AreEqual("parameter 31", bestParameters.Parameter3.Value);

    // 2 * 4 * 1 = 8 combinations; index 4 is the forced winner (-42) and
    // index 5 is the combination whose learner threw ("Exception test").
    Assert.AreEqual(8, result.Count);
    Assert.AreEqual(result.Errors, new double[] { 312111, 312211, 312311, 312411, -42, Double.PositiveInfinity, 312312, 312412 });

    Exception[] exceptions = result.Exceptions;
    for (int i = 0; i < exceptions.Length; i++)
    {
        if (i != 5)
        {
            Assert.IsNull(exceptions[i]);
        }
        else
        {
            Assert.AreEqual("Exception test", exceptions[i].Message);
        }
    }

    Mapper[] models = result.Models;
    Assert.AreEqual(8, models.Length);
    Assert.AreEqual(6, lossModels.Count); // 8 total - forced winner - failed model

    int a = ranges.Parameter1.Length;
    int b = ranges.Parameter2.Length;
    int c = ranges.Parameter3.Length;
    Assert.AreEqual(2, a);
    Assert.AreEqual(4, b);
    Assert.AreEqual(1, c);

    for (int i = 0; i < models.Length; i++)
    {
        if (i == 5)
        {
            // The combination whose learner threw has no model.
            Assert.IsNull(models[i]);
        }
        else
        {
            Assert.AreEqual(inputs, models[i].Inputs);
            Assert.AreEqual(outputs, models[i].Outputs);
            Assert.AreEqual(weights, models[i].Weights);
            Assert.AreEqual(4, models[i].NumberOfInputs);
            Assert.AreEqual(2, models[i].NumberOfOutputs);
            // Decode combination index i back into per-range indices.
            Assert.AreEqual(ranges.Parameter1.Values[((i / c) / b) % a], models[i].Parameter1);
            Assert.AreEqual(ranges.Parameter2.Values[(i / c) % b], models[i].Parameter2);
            Assert.AreEqual(ranges.Parameter3.Values[i % c], models[i].Parameter3);
            if (i != 4)
            {
                Assert.IsTrue(lossModels.Contains(models[i]));
            }
        }
    }

    Assert.AreEqual(4, result.NumberOfInputs);
    Assert.AreEqual(2, result.NumberOfOutputs);

    // Every combination object is distinct, but all share the same range values.
    var parameters = result.Parameters;
    for (int i = 0; i < parameters.Length; i++)
    {
        for (int j = 0; j < parameters.Length; j++)
        {
            if (i != j)
            {
                Assert.AreNotSame(parameters[i], parameters[j]);
                Assert.AreNotEqual(parameters[i], parameters[j]);
                Assert.AreNotEqual(parameters[i].Parameter1, parameters[j].Parameter1);
                Assert.AreNotEqual(parameters[i].Parameter2, parameters[j].Parameter2);
                Assert.AreNotEqual(parameters[i].Parameter3, parameters[j].Parameter3);
            }

            Assert.AreEqual(parameters[i].Parameter1.Values, parameters[j].Parameter1.Values);
            Assert.AreEqual(parameters[i].Parameter2.Values, parameters[j].Parameter2.Values);
            Assert.AreEqual(parameters[i].Parameter3.Values, parameters[j].Parameter3.Values);
        }
    }
}
public ModelScorerGaussian(PhyloTree tree, GridSearch optimizer) : base(tree, optimizer) { }
private void grdConfirm_OnOk(object sender, RoutedEventArgs e)
{
    // OK handler of the voucher-search dialog: validates the inputs, runs the
    // filter command, reports how many vouchers matched (with their totals),
    // shows them in a browse view, and replaces the working dataset
    // (StartUp.DsTrans) with the search result.
    try
    {
        // If an auto-complete textbox still has keyboard focus, force it to
        // commit/validate its pending text before the search runs.
        if (Keyboard.FocusedElement.GetType().Equals(typeof(TextBoxAutoComplete)))
        {
            AutoCompleteTextBox txt = (Keyboard.FocusedElement as TextBoxAutoComplete).ParentControl;
            if (!txt.CheckLostFocus())
            {
                return;
            }
        }
        if (CheckValid())
        {
            // Persist the selected date range into the system variables.
            SysObj.SetSysvar("M_ngay_ct1", txtNgay_ct1.dValue);
            SysObj.SetSysvar("M_ngay_ct2", txtNgay_ct2.dValue);
            bool IsshowView = false;
            GridSearch._GenerateSQLString();
            GridSearch.GrdSearch.ExecuteCommand(DataPresenterCommands.EndEditModeAndAcceptChanges);

            // Fill the header (@PhFilter) and detail (@CtFilter) filter
            // parameters and execute the search command.
            StartUp.TransFilterCmd.Parameters["@PhFilter"].Value = GetPhFilterExpr(); // e.g. "ngay_ct between '20100101' and '20100131'"
            StartUp.TransFilterCmd.Parameters["@CtFilter"].Value = GetCtFilterExpr();
            StartUp.TransFilterCmd.Parameters["@Sl_ct"].Value = 0;
            DataSet newDs = DataProvider.FillCommand(StartUp.SysObj, StartUp.TransFilterCmd);

            // Summarize the search result for the notification message:
            // row count plus grand totals in foreign and local currency.
            int n = 0;
            Decimal a = (from p in newDs.Tables[0].AsEnumerable() select p.Field<Decimal?>("t_tt")).Sum().Value;
            string tongPsVND = a.ToString(SysObj.GetOption("M_IP_TIEN").ToString());
            Decimal tongPsNT = (from p in newDs.Tables[0].AsEnumerable() select p.Field<Decimal?>("t_tt_nt")).Sum().Value;
            string _tongPsNT = tongPsNT.ToString(SysObj.GetOption("M_IP_TIEN_NT").ToString());
            n = newDs.Tables[0].Rows.Count;
            //a = StartUp.DsTrans.Tables[0].AsEnumerable().Sum("t_tt").Value.ToString();
            if (n > 0)
            {
                IsshowView = true;
                // Message text: "There are [n] vouchers. Total amount [nt] / [vnd]"
                Sm.Windows.Controls.ExMessageBox.Show(410, StartUp.SysObj, "Có " + "[" + n + "]" + " chứng từ. Tổng phát sinh " + "[" + _tongPsNT + "]" + " / " + "[" + tongPsVND + "]", "", MessageBoxButton.OK, MessageBoxImage.Information);
            }
            else
            {
                // Message text: "No such voucher exists!"
                Sm.Windows.Controls.ExMessageBox.Show(415, StartUp.SysObj, "Không có chứng từ nào như vậy!", "", MessageBoxButton.OK, MessageBoxImage.Information);
            }
            if (IsshowView == true)
            {
                // Show the browse view for the matched vouchers.
                // (Superseded) hard-coded column layouts for the two browse
                // grids used to live here; StartUp.stringBrowse1/2 are used
                // instead.
                SmVoucherLib.FormView _frmView = new SmVoucherLib.FormView(SysObj, newDs.Tables[0].DefaultView, newDs.Tables[1].DefaultView, StartUp.stringBrowse1, StartUp.stringBrowse2, "stt_rec");
                _frmView.ListFieldSum = "t_tt_nt;t_tt";
                _frmView.TongCongLabel = "Tổng thanh toán";
                // Window title: Vietnamese ("Ky" = period) vs. English wording.
                if (StartUp.M_LAN.Equals("V"))
                {
                    _frmView.frmBrw.Title = StartUp.M_Tilte + ". Ky " + txtNgay_ct1.Text + " - " + txtNgay_ct2.Text;
                }
                else
                {
                    _frmView.frmBrw.Title = StartUp.M_Tilte + ". Period " + txtNgay_ct1.Text + " - " + txtNgay_ct2.Text;
                }
                // Initialize the user-defined ("free code") fields on the browse grid.
                SmVoucherLib.FreeCodeFieldLib.InitFreeCodeField(StartUp.SysObj, _frmView.frmBrw.oBrowseCt, StartUp.Ma_ct, 1);
                _frmView.frmBrw.LanguageID = "TT_SOCTHDA_HDDT_8";
                _frmView.ShowDialog();

                // Replace the contents of the working dataset with the search
                // result: clear the old rows, then copy the new ones in.
                StartUp.DataFilter(StartUp.DsTrans.Tables[0].Rows[0]["stt_rec"].ToString());
                int Count1 = StartUp.DsTrans.Tables[0].Rows.Count;
                int Count2 = StartUp.DsTrans.Tables[1].Rows.Count;
                for (int i = Count1 - 1; i >= 1; i--)
                {
                    StartUp.DsTrans.Tables[0].Rows.RemoveAt(i);
                }
                for (int i = 0; i < Count2; i++)
                {
                    StartUp.DsTrans.Tables[1].Rows.RemoveAt(0);
                }
                int Count = 0;
                Count = newDs.Tables[0].Rows.Count;
                for (int i = 0; i < Count; i++)
                {
                    StartUp.DsTrans.Tables[0].Rows.Add(newDs.Tables[0].Rows[i].ItemArray);
                }
                Count = newDs.Tables[1].Rows.Count;
                for (int i = 0; i < Count; i++)
                {
                    StartUp.DsTrans.Tables[1].Rows.Add(newDs.Tables[1].Rows[i].ItemArray);
                }
                // Row 0 of Tables[0] is deliberately NOT removed above: it is
                // a temporary row.
                if (newDs.Tables[0].Rows.Count > 0)
                {
                    // Re-clamp iRow so it stays inside the new result set.
                    if (FrmTT_SOCTHDA_HDDT.iRow > newDs.Tables[0].Rows.Count - 1)
                    {
                        FrmTT_SOCTHDA_HDDT.iRow = newDs.Tables[0].Rows.Count - 1;
                    }
                    StartUp.DataFilter(StartUp.DsTrans.Tables[0].Rows[FrmTT_SOCTHDA_HDDT.iRow]["stt_rec"].ToString());
                }
                // If the user selected a record in the browse view, restore
                // iRow and the row filter to that record.
                if (_frmView.DataGrid.ActiveRecord != null)
                {
                    int select_irow = (_frmView.DataGrid.ActiveRecord as DataRecord).Index;
                    if (select_irow >= 0)
                    {
                        string selected_stt_rec = (_frmView.DataGrid.DataSource as DataView)[select_irow]["stt_rec"].ToString();
                        // NOTE(review): iRow is set to index + 1 here but
                        // clamped against Rows.Count - 1 above — confirm the
                        // off-by-one is intentional.
                        FrmTT_SOCTHDA_HDDT.iRow = select_irow + 1;
                        // Refresh the row filter to the selected record.
                        StartUp.DataFilter(selected_stt_rec);
                    }
                }
                this.Close();
            }
        }
    }
    catch (Exception ex)
    {
        SmErrorLib.ErrorLog.CatchMessage(ex);
    }
}
public void TestGridSearch() { //https://www.hackerrank.com/challenges/the-grid-search/problem /* * string[] G = new string[] * { * "7283455864", * "6731158619", * "8988242643", * "3830589324", * "2229505813", * "5633845374", * "6473530293", * "7053106601", * "0834282956", * "4607924137" * }; * * string[] P = new string[] * { * "9505", * "3845", * "3530" * }; */ string[] G = new string[] { "123412", "561212", "123634", "7812889" }; string[] P = new string[] { "12", "34" }; /* * string[] G = new string[] * { * "111111111111111", * "111111111111111", * "111111011111111", * "111111111111111", * "111111111111111" * }; * * string[] P = new string[] * { * "11111", * "11111", * "11110" * }; */ string result = GridSearch.Search(G, P); }
void InsertRowCT(string nd51) { DataRow[] _row = DsPrint.Tables["TableCT"].DefaultView.ToTable().Select("tag = 1"); if (_row.Count() > 0) { return; } string stt_rec = DsPrint.Tables["TablePH"].DefaultView[0]["stt_rec"].ToString(); int _index = 1; foreach (DataRowView dr in DsPrint.Tables["TableCT"].DefaultView) { dr["stt"] = _index; _index++; } /*Chị VANTT bảo không lên đoạn này 130734187 * if (nd51 == "1") * { * //Thêm dòng ghi chú * string gc_thue = DsPrint.Tables["TablePH"].DefaultView[0]["gc_thue"].ToString().Trim(); * if (gc_thue != "") * { * DataRow row = DsPrint.Tables["TableCT"].NewRow(); * row["stt_rec"] = stt_rec; * row["ten_vt"] = "(" + gc_thue + ")"; * row["tag"] = 1; * DsPrint.Tables["TableCT"].Rows.Add(row); * } * } */ //thêm dòng chiết khấu decimal t_ck = Convert.ToDecimal(DsPrint.Tables["TablePH"].DefaultView[0]["t_ck"]); decimal t_ck_nt = Convert.ToDecimal(DsPrint.Tables["TablePH"].DefaultView[0]["t_ck_nt"]); if (t_ck != 0 || t_ck_nt != 0) { DataRow newrow = DsPrint.Tables["TableCT"].NewRow(); newrow["stt_rec"] = stt_rec; newrow["stt_rec0"] = -1; newrow["ten_vt"] = "Chiết khấu"; newrow["ten_vt2"] = "Discount"; newrow["tien2"] = t_ck; newrow["tien_nt2"] = t_ck_nt; newrow["tag"] = 1; DsPrint.Tables["TableCT"].Rows.Add(newrow); } int rowCountCT = DsPrint.Tables["TableCT"].DefaultView.Count; GridSearch.InsertSubRow("HDA", "TableCT"); ////Thêm số dòng cho đủ ngầm định //if (rowCountCT < so_dong_in) //{ // for (int k = rowCountCT; k < so_dong_in; k++) // { // DataRow row = DsPrint.Tables["TableCT"].NewRow(); // row["stt_rec"] = stt_rec; // row["stt_rec0"] = "999"; // row["tag"] = 1; // DsPrint.Tables["TableCT"].Rows.Add(row); // } //} DsPrint.Tables["TableCT"].DefaultView.RowFilter = "stt_rec= '" + stt_rec + "'"; DsPrint.Tables["TableCT"].DefaultView.Sort = "stt_rec0"; GridSearch.DSource = DsPrint; }
public void GridsearchConstructorTest()
{
    // Exercises the classic GridSearch API on the XOR problem and checks that
    // the best polynomial degree found is not 1 (a degree-1 polynomial kernel
    // is linear and cannot separate XOR).
    // NOTE(review): this copy seeds via the older Accord.Math.Tools.SetupGenerator
    // API; sibling tests use Accord.Math.Random.Generator.Seed — confirm both
    // seed the same generator.
    Accord.Math.Tools.SetupGenerator(0);

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
        new GridSearchRange("degree", new double[] { 1, 10, 2, 3, 4, 5 }),
        new GridSearchRange("constant", new double[] { 0, 1, 2 })
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        int degree = (int)parameters["degree"].Value;
        double constant = parameters["constant"].Value;
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        Polynomial kernel = new Polynomial(degree, constant);
        KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return ksvm; // Return the current model
    };

    // Declare some out variables to pass to the grid search algorithm
    GridSearchParameterCollection bestParameters;
    double minError;

    // Compute the grid search to find the best Support Vector Machine
    KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

    // A linear kernel can't solve the xor problem.
    Assert.AreNotEqual((int)bestParameters["degree"].Value, 1);

    // The minimum error should be zero because the problem is well-known.
    Assert.AreEqual(minError, 0.0);

    Assert.IsNotNull(bestModel);
    Assert.IsNotNull(bestParameters);
    Assert.AreEqual(bestParameters.Count, 3);
}
public void Dijkstra() { var path = GridSearch.Dijkstra(_grid, _startCell.GetPosition(), _endCell.GetPosition()); StartCoroutine(ShowPath(path)); }