/// <summary>
/// Checks each ball's measured diameter against the tolerance band of the
/// channel it lies in, and reports an overall OK/NG verdict plus the NG details.
/// </summary>
/// <param name="hv_liudao">Positions of the balls.</param>
/// <param name="hv_channels">Channel boundary information.</param>
/// <param name="hv_liudaoR">Measured ball diameters (pixel values).</param>
/// <param name="hv_myLength">Nominal ball diameter per channel (pixel values).</param>
/// <param name="hv_myThreashod">Allowed diameter deviation per channel.</param>
/// <param name="hv_myResult">1 if every ball is within tolerance, 0 otherwise.</param>
/// <param name="hv_NGLengths">Measured diameters of the NG balls.</param>
/// <param name="hv_NGChls">Channel numbers of the NG balls.</param>
public void testOKNG2(HTuple hv_liudao, HTuple hv_channels, HTuple hv_liudaoR,
    HTuple hv_myLength, HTuple hv_myThreashod, out HTuple hv_myResult,
    out HTuple hv_NGLengths, out HTuple hv_NGChls)
{
    // Local control variables (HDevelop-export style, kept as HTuple).
    HTuple hv_ballCount = null, hv_channelCount = null, hv_ballIdx = null;
    HTuple hv_chanProbe = new HTuple(), hv_chanIdx = new HTuple();
    HTuple hv_aboveMin = new HTuple(), hv_belowMax = new HTuple();
    HTuple hv_inRange = new HTuple();

    // Assume everything passes until a ball falls outside its band.
    hv_myResult = 1;
    hv_NGChls = new HTuple();
    hv_NGLengths = new HTuple();

    hv_ballCount = new HTuple(hv_liudao.TupleLength());
    hv_channelCount = new HTuple(hv_channels.TupleLength());

    HTuple outerEnd = hv_ballCount - 1;
    HTuple outerStep = 1;
    for (hv_ballIdx = 0; hv_ballIdx.Continue(outerEnd, outerStep);
        hv_ballIdx = hv_ballIdx.TupleAdd(outerStep))
    {
        // Find the first channel boundary that lies beyond this ball's
        // position; the ball belongs to the channel just before it.
        HTuple innerEnd = hv_channelCount;
        HTuple innerStep = 1;
        for (hv_chanProbe = 1; hv_chanProbe.Continue(innerEnd, innerStep);
            hv_chanProbe = hv_chanProbe.TupleAdd(innerStep))
        {
            if ((int)(new HTuple(((hv_liudao.TupleSelect(hv_ballIdx))).TupleLess(
                hv_channels.TupleSelect(hv_chanProbe)))) != 0)
            {
                break;
            }
        }
        hv_chanIdx = hv_chanProbe - 1;

        // In-tolerance test: nominal - threshold < diameter < nominal + threshold.
        hv_aboveMin = new HTuple(((hv_liudaoR.TupleSelect(hv_ballIdx))).TupleGreater(
            (hv_myLength.TupleSelect(hv_chanIdx)) - (hv_myThreashod.TupleSelect(hv_chanIdx))));
        hv_belowMax = new HTuple(((hv_liudaoR.TupleSelect(hv_ballIdx))).TupleLess(
            (hv_myLength.TupleSelect(hv_chanIdx)) + (hv_myThreashod.TupleSelect(hv_chanIdx))));
        hv_inRange = hv_aboveMin.TupleAnd(hv_belowMax);

        if ((int)(hv_inRange) == 0)
        {
            // Out of tolerance: record channel and measured diameter.
            // NOTE(review): this records hv_chanProbe (= hv_chanIdx + 1) —
            // presumably channels are reported 1-based; confirm with callers.
            hv_NGChls = hv_NGChls.TupleConcat(hv_chanProbe);
            hv_NGLengths = hv_NGLengths.TupleConcat(hv_liudaoR.TupleSelect(hv_ballIdx));
        }

        // The overall verdict stays 1 only while every ball passes.
        hv_myResult = hv_myResult.TupleAnd(hv_inRange);
    }
    return;
}
/// <summary>
/// Prepares a pretrained HALCON deep-learning classifier for retraining and
/// kicks off the training on a background thread: reads the pretrained
/// network, reads and splits the preprocessed data set, sets the training
/// hyper-parameters, initializes the runtime, then starts
/// <c>TrainingThread</c>.
/// </summary>
/// <param name="hv_WindowHandle">HALCON window used for status display; stored in the instance field.</param>
/// <param name="pretrained_DlClassifierName">Path/name of the pretrained classifier to load.</param>
public void TrainProcess(HTuple hv_WindowHandle, string pretrained_DlClassifierName)
{
    this.hv_WindowHandle = hv_WindowHandle;
    //This example shows how to train a deep learning fruit classifier, along with a short overview of the necessary steps.
    //
    //Initialization.
    //dev_open_window_fit_size(0, 0, hv_WindowWidth, hv_WindowHeight, -1, -1, out hv_WindowHandle);
    set_display_font(hv_WindowHandle, 16, "mono", "true", "false");
    //
    //Some procedures use a random number generator. Set the seed for reproducibility.
    HOperatorSet.SetSystem("seed_rand", 42);
    //
    HOperatorSet.ClearWindow(hv_WindowHandle);
    //
    //** TRAINING **
    //
    //Read one of the pretrained networks.
    HOperatorSet.ReadDlClassifier(pretrained_DlClassifierName, out this.hv_Train_DLClassifierHandle);
    //2) Split data into training, validation, and test set.
    //
    //Read the data, i.e., the paths of the images and their respective ground truth labels.
    //Dispose previous tuples before the out-parameters are rewritten (avoids leaking native HALCON memory).
    hv_ImageFiles.Dispose();
    hv_Labels.Dispose();
    hv_LabelsIndices.Dispose();
    hv_Classes.Dispose();
    read_dl_classifier_data_set(this.hv_PreprocessedFolder, "last_folder", out hv_ImageFiles,
        out hv_Labels, out hv_LabelsIndices, out hv_Classes);
    //
    //Split the data into three subsets,
    //Default for training 80%, validation 20%, and testing 0%.
    hv_TrainingImages.Dispose();
    hv_TrainingLabels.Dispose();
    hv_ValidationImages.Dispose();
    hv_ValidationLabels.Dispose();
    hv_TestImages.Dispose();
    hv_TestLabels.Dispose();
    split_dl_classifier_data_set(hv_ImageFiles, hv_Labels, this.hv_TrainingPercent,
        this.hv_ValidationPercent, out hv_TrainingImages, out hv_TrainingLabels,
        out hv_ValidationImages, out hv_ValidationLabels, out hv_TestImages, out hv_TestLabels);
    //
    //Set training hyper-parameters.
    //In order to retrain the neural network, we have to specify
    //the class names of our classification problem.
    HOperatorSet.SetDlClassifierParam(this.hv_Train_DLClassifierHandle, "classes", hv_Classes);
    //Set the batch size.
    HOperatorSet.SetDlClassifierParam(this.hv_Train_DLClassifierHandle, "batch_size", this.hv_BatchSize);
    hv_RemovePreprocessingAfterExample.Dispose();
    hv_RemovePreprocessingAfterExample = 0;
    //Try to initialize the runtime environment.
    try
    {
        HOperatorSet.SetDlClassifierParam(this.hv_Train_DLClassifierHandle, "runtime_init",
            "immediately");
    }
    // catch (Exception)
    catch (HalconException HDevExpDefaultException1)
    {
        //Runtime initialization failed (e.g. no suitable GPU/device): show the
        //error, optionally clean up the preprocessed data, and abort training.
        HDevExpDefaultException1.ToHTuple(out hv_Exception);
        HOperatorSet.DispText(hv_WindowHandle, "Failed to initialize the runtime environment.",
            "window", "bottom", "right", "red", "box", "false");
        //dev_disp_error_text(hv_Exception);
        //Delete the preprocessed folder only when cleanup is requested and the
        //error code is not 4104 — presumably an expected/benign HALCON error; confirm.
        if ((int)(hv_RemovePreprocessingAfterExample.TupleAnd(new HTuple(((hv_Exception.TupleSelect(
            0))).TupleNotEqual(4104)))) != 0)
        {
            remove_dir_recursively(hv_PreprocessedFolder);
            using (HDevDisposeHelper dh = new HDevDisposeHelper())
            {
                HOperatorSet.DispText(hv_WindowHandle, ("Preprocessed data in folder \"" + hv_PreprocessedFolder) + "\" have been deleted.",
                    "window", "top", "right", "red", new HTuple(), new HTuple());
            }
        }
        return;
    }
    //For this data set, an initial learning rate of 0.001 has proven to yield good results.
    HOperatorSet.SetDlClassifierParam(this.hv_Train_DLClassifierHandle, "learning_rate",
        this.hv_InitialLearningRate);
    // stop(...); only in hdevelop
    //
    //Train the classifier.
    HOperatorSet.DispText(hv_WindowHandle, "Training has started...", "window", "top",
        "left", "black", new HTuple(), new HTuple());
    Console.WriteLine("开始训练过程...");
    //Run the actual training loop on a background thread so the UI stays responsive.
    _TrainingThread = new Thread(new ThreadStart(TrainingThread));
    _TrainingThread.IsBackground = true;
    _TrainingThread.Start();
    //
    //In this example, we reduce the learning rate by a factor of 1/10 every 5th epoch.
    //We iterate 50 times over the full training set.
    //ComputeConfusionMatrix(hv_WindowHandle);
}