/// <summary>
/// Convenience overload: locates a train for _TrainData between two stations
/// using this instance's shared hour/minute and cached station list.
/// The matched station ids are written back through the ref parameters.
/// </summary>
public bool FindTrainBetween( TrainData _TrainData ,
	                             ref int _StationID_i ,
	                             ref int _StationID_j )
    {
        // Delegate to the full overload, supplying the instance's cached state.
        return FindTrainBetween(
            m_ShareHour,
            m_ShareMinute,
            _TrainData,
            m_Stations,
            ref _StationID_i,
            ref _StationID_j);
    }
 /// <summary>
 /// Reads a train timetable from the first worksheet (Sheet1) of an Excel file
 /// via OLE DB and returns one TrainData per timetable column.
 /// Station rows come in arrive/leave pairs; direction 1 walks them
 /// top-to-bottom, any other value walks them bottom-to-top.
 /// </summary>
 /// <param name="filePath">Path to the .xls workbook; must be non-empty.</param>
 /// <param name="direction">Reading direction, also passed through to each TrainData.</param>
 /// <returns>The parsed trains, one per worksheet column after the first.</returns>
 /// <exception cref="ArgumentException">When filePath is null or empty.</exception>
 public List<TrainData> Read(String filePath, int direction)
 {
     List<TrainData> resultList = new List<TrainData>();
     if (string.IsNullOrEmpty(filePath)) { throw new ArgumentException("文件为空"); }
     String sConnectionString = "Provider=Microsoft.Jet.OleDb.4.0;" + "data source=" + filePath + ";Extended Properties='Excel 8.0; HDR=yes; IMEX=0'";
     DataSet workListDataset = new DataSet();
     // BUG FIX: the original never closed/disposed the connection, command or
     // adapter. Dispose them deterministically once the DataSet is filled; the
     // DataSet itself is disconnected and remains usable afterwards.
     using (OleDbConnection objConn = new OleDbConnection(sConnectionString))
     using (OleDbCommand objCmdSelect = new OleDbCommand("SELECT * FROM [Sheet1$]", objConn))
     using (OleDbDataAdapter objAdapter = new OleDbDataAdapter())
     {
         objConn.Open();
         objAdapter.SelectCommand = objCmdSelect;
         objAdapter.Fill(workListDataset);
     }

     // Map each data-row index to its station name; every named station
     // occupies two consecutive rows (arrive row i, leave row i+1).
     Dictionary<int, String> stationDict = new Dictionary<int, string>();
     for (int i = 1; i < workListDataset.Tables[0].Rows.Count; i += 2)
     {
         String stationName = workListDataset.Tables[0].Rows[i][0].ToString();
         if (stationName != "")
         {
             stationDict.Add(i, stationName);
             stationDict.Add(i + 1, stationName);
         }
     }

     // Each remaining column describes one train; the column name is the train number.
     for (int i = 1; i < workListDataset.Tables[0].Columns.Count; i++)
     {
         String trainNo = workListDataset.Tables[0].Columns[i].ColumnName;
         TrainData td = new TrainData(trainNo, DecideType(trainNo), direction);
         TimeReader timeReader = new TimeReader();
         if (direction == 1)
         {
             // Forward direction: arrive time on row j, leave time on row j+1.
             for (int j = 1; j < stationDict.Count + 1; j += 2)
             {
                 String stationName = stationDict[j];
                 String arriveTime = timeReader.ConvertTime(workListDataset.Tables[0].Rows[j][i].ToString());
                 String leaveTime = timeReader.ConvertTime(workListDataset.Tables[0].Rows[j + 1][i].ToString());
                 if (arriveTime != "" || leaveTime != "")
                 {
                     td.AddStop(stationName, arriveTime, leaveTime);
                 }
             }
         }
         else
         {
             // Reverse direction: walk rows bottom-up; arrive on row j, leave on row j-1.
             for (int j = stationDict.Count; j > 0; j -= 2)
             {
                 String stationName = stationDict[j];
                 String arriveTime = timeReader.ConvertTime(workListDataset.Tables[0].Rows[j][i].ToString());
                 String leaveTime = timeReader.ConvertTime(workListDataset.Tables[0].Rows[j - 1][i].ToString());
                 if (arriveTime != "" || leaveTime != "")
                 {
                     td.AddStop(stationName, arriveTime, leaveTime);
                 }
             }
         }
         resultList.Add(td);
     }
     return resultList;
 }
Example #3
0
        /// <summary>
        /// Builds and trains a small back-propagation MLP (layer sizes 2-2-2-2-1)
        /// on four hard-coded 2-D samples with alternating 0/1 labels.
        /// </summary>
        public Form2()
        {
            InitializeComponent();

            // Network setup: Gaussian activation, back-propagation training.
            bp = new ANN_MLP();
            Matrix<int> layerSizes = new Matrix<int>(new int[] { 2, 2, 2, 2, 1 });
            bp.SetLayerSizes(layerSizes);
            bp.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.Gaussian, 0, 0);
            bp.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
            bp.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);

            // One label per training row.
            float[,] labels = new float[,] { { 0 }, { 1 }, { 0 }, { 1 } };
            Matrix<float> labelsMats = new Matrix<float>(labels);

            // Training samples, scaled down by 512 before training.
            float[,] trainingData = new float[,] { { 1, 2 }, { 51, 52 }, { 111, 112 }, { 211, 212 } };
            for (int row = 0; row < trainingData.GetLength(0); row++)
            {
                for (int col = 0; col < trainingData.GetLength(1); col++)
                {
                    trainingData[row, col] /= 512;
                }
            }
            Matrix<float> trainingDataMat = new Matrix<float>(trainingData);

            TrainData tmpTrainData = new TrainData(trainingDataMat, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, labelsMats);
            bp.Train(tmpTrainData, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);
        }
Example #4
0
        /// <summary>
        /// Trains the perceptron on the four XOR input rows (each prefixed with
        /// a 1.0 bias term) and their desired outputs.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            // Each row: bias (1.0) followed by the two binary inputs.
            TrainData trainData = new TrainData()
            {
                Data = new List <List <double> >()
                {
                    new List <double>() { 1.0, 0.0, 0.0 },
                    new List <double>() { 1.0, 1.0, 0.0 },
                    new List <double>() { 1.0, 0.0, 1.0 },
                    new List <double>() { 1.0, 1.0, 1.0 },
                }
            };

            // Desired output for each training row, in the same order.
            DesiresData desiresData = new DesiresData()
            {
                Desires = new List <List <double> >()
                {
                    new List <double>() { 0 },
                    new List <double>() { 1 },
                    new List <double>() { 1 },
                    new List <double>() { 0 },
                }
            };

            _preceptron.Train(trainData, desiresData);
        }
        /// <summary>
        /// Picks a random treasure for the given train car from its configured
        /// treasure list and converts it to a game Item (Ring, Boots or Object).
        /// </summary>
        /// <param name="index">Which train car's treasure group to draw from.</param>
        /// <returns>The reward item created from the chosen treasure id.</returns>
        private static Item GetCustomTrainTreasure(TRAINS index)
        {
            TrainData trainData = BetterTrainLootMod.Instance.trainCars[index];

            // Possible treasure based on selected treasure group selected above.
            List <TrainTreasure> possibleLoot = new List <TrainTreasure>(trainData.treasureList)
                                                .Where(loot => loot.Enabled && loot.IsValid())
                                                .OrderBy(loot => loot.Chance)
                                                .ThenBy(loot => loot.Id)
                                                .ToList();

            if (possibleLoot.Count == 0)
            {
                // NOTE(review): this only logs and falls through — ChooseItem below is
                // still called on the empty list, which will likely fail. Confirm an
                // early return (or default item) isn't needed here.
                BetterTrainLootMod.Instance.Monitor.Log($"   Group: {trainData.TrainCarID}, No Possible Loot Found... check the logic");
            }

            TrainTreasure treasure = possibleLoot.ChooseItem(Game1.random);
            int           id       = treasure.Id;

            // Lost books have custom handling  -- No default lost books... but someone might configure them
            if (id == 102) // LostBook Item ID
            {
                if (Game1.player.archaeologyFound == null || !Game1.player.archaeologyFound.ContainsKey(102) || Game1.player.archaeologyFound[102][0] >= 21)
                {
                    // NOTE(review): removing from this local list has no effect on
                    // `treasure`, which is still returned as the reward below — and the
                    // "found a lost book" message is shown even in this branch. Verify
                    // this is the intended behavior.
                    possibleLoot.Remove(treasure);
                }
                Game1.showGlobalMessage("You found a lost book. The library has been expanded.");
            }

            Item reward;

            // Create reward item: ring and boots id ranges get their specific types,
            // everything else becomes a plain Object.
            if ((id >= 516 && id <= 534) || id == 810 || id == 811 || id == 839 || (id >= 859 && id <= 863) || id == 887 || id == 888)
            {
                reward = new Ring(id);
            }
            else if ((id >= 504 && id <= 515) || id == 804 || id == 806 || id == 853 || id == 854 || id == 855 || id == 878)
            {
                reward = new Boots(id);
            }
            //reward = new Clothing(id);
            //reward = new Hat(id);
            else
            {
                reward = (Item) new StardewValley.Object(id, 1); // Note: if any boots or rings are in the treasure list, they will not be equipable
            }
            return(reward);
        }
Example #6
0
        /// <summary>
        /// Populates <c>RandForests</c> with <c>numberOfForests</c> decision trees,
        /// each trained on a random subset of roughly sqrt(N)+1 of the analysis
        /// parameters; a tree whose root attribute is "False" is discarded and retried.
        /// </summary>
        /// <param name="trainedData">Training partition (data itself is drawn via GetRandomData).</param>
        /// <param name="checkData">Validation partition (currently unused in this method).</param>
        /// <param name="analysParameter">Candidate parameters, keyed by parameter id.</param>
        private void PreprocessToWorkTreeRandomForest(TrainData trainedData, TrainData checkData,
                                                      Dictionary <int, string> analysParameter)
        {
            int numberOfForest = Convert.ToInt32(numberOfForests.Value);

            RandForests = new List <DM.DecisionTree.TreeNode>();
            int parameterID = XMLWork.FindIDWithName(parameterCondition.SelectedItem.ToString(), Properties.Settings.Default.Languages);
            int restrictCountOfParameters          = Convert.ToInt32(Math.Sqrt(analysParameter.Count)) + 1;
            Dictionary <int, string> newParameters = new Dictionary <int, string>();

            // The usable parameter ids never change between iterations, so build the
            // list once instead of rebuilding it for every forest (as the original did).
            List <int> parametersName = new List <int>();
            foreach (KeyValuePair <int, string> keyValuePair in analysParameter)
            {
                if (!keyValuePair.Value.StartsWith("Def"))
                {
                    parametersName.Add(keyValuePair.Key);
                }
            }

            for (int forestsCount = 0; forestsCount < numberOfForest;)
            {
                DM.DecisionTree.RandomForest random = new RandomForest();
                for (int i = 0; i < restrictCountOfParameters; i++)
                {
                    // BUG FIX: index within parametersName, which can be shorter than
                    // analysParameter once "Def*" entries are filtered out. The original
                    // drew from [0, analysParameter.Count) and could throw
                    // ArgumentOutOfRangeException on parametersName[j].
                    int j = rand.Next(0, parametersName.Count);
                    if (!newParameters.ContainsKey(parametersName[j]))
                    {
                        newParameters.Add(parametersName[j], analysParameter[parametersName[j]]);
                    }
                }
                // Train one candidate tree on the randomly chosen parameter subset.
                List <OneRow> trainData = GetRandomData(TrainData.Train, newParameters, parameterID);
                _root = random.MountTree(trainData, newParameters, parameterID, Convert.ToInt32(textEdit1.Value) + 1);
                if (_root.attributeName != "False")
                {
                    RandForests.Add(_root);
                    forestsCount++;
                }
                newParameters.Clear();
            }
        }
Example #7
0
        /// <summary>
        /// Trains the bp network: each image is scaled to bpWidth x bpHeight and
        /// flattened into one row of pixel values; each label row holds up to
        /// bpRectangleCount rectangles normalized to the scaled image size.
        /// </summary>
        /// <param name="imgs">Training images mapped to their target rectangles.</param>
        private void TrainBP(Dictionary <Bitmap, List <Rectangle> > imgs)
        {
            bpTrainDataCount = imgs.Count;
            Matrix <float> trainingDataMats = new Matrix <float>(bpTrainDataCount, bpWidth * bpHeight);
            trainingDataMats.SetValue(0);

            // Unused rectangle slots stay at -1.
            Matrix <float> labelsMats = new Matrix <float>(bpTrainDataCount, bpRectangleCount * 4);
            labelsMats.SetValue(-1);

            int row = 0; // current sample row
            foreach (var item in imgs)
            {
                Bitmap      img   = item.Key;
                Rectangle[] rects = item.Value.ToArray();

                // Scale the image (and its rectangles) to the network input size.
                Bitmap             tmpImg       = ZoomImg(img, bpWidth, bpHeight, ref rects);
                Image <Bgr, float> trainingData = new Image <Bgr, float>(tmpImg);
                for (int i = 0; i < bpWidth * bpHeight; i++)
                {
                    // BUG FIX: the column index must be i % bpWidth. The original used
                    // i % bpHeight, which reads the wrong pixel whenever bpWidth != bpHeight.
                    // NOTE(review): ToArgb() includes the alpha byte (0xFF......), so the
                    // division by 0xFFFFFF yields a negative value — confirm intended.
                    trainingDataMats[row, i] = Color.FromArgb(
                        (int)trainingData.Data[i / bpWidth, i % bpWidth, 2],
                        (int)trainingData.Data[i / bpWidth, i % bpWidth, 1],
                        (int)trainingData.Data[i / bpWidth, i % bpWidth, 0]
                        ).ToArgb() / (float)0xFFFFFF;
                }

                // Rectangle coordinates, normalized to the scaled image dimensions.
                for (int i = 0; i < rects.Length * 4 && i < bpRectangleCount * 4; i += 4)
                {
                    labelsMats[row, i]     = rects[i / 4].X / (float)bpWidth;
                    labelsMats[row, i + 1] = rects[i / 4].Y / (float)bpHeight;
                    labelsMats[row, i + 2] = rects[i / 4].Width / (float)bpWidth;
                    labelsMats[row, i + 3] = rects[i / 4].Height / (float)bpHeight;
                }
                tmpImg.Dispose();
                tmpImg = null;
                row++;
            }
            TrainData tmpTrainData = new TrainData(trainingDataMats, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, labelsMats);
            bp.Train(tmpTrainData, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);
        }
Example #8
0
 /// <summary>
 /// Trains a normal Bayes classifier on the given row samples and saves the
 /// model to "{modelName}.xml".
 /// </summary>
 /// <param name="inputData">Feature matrix, one sample per row.</param>
 /// <param name="outputData">Class label for each sample row.</param>
 /// <param name="modelName">Base name of the saved model file (".xml" is appended).</param>
 /// <returns>True when training and saving complete; exceptions propagate to the caller.</returns>
 public bool trainingNaiveBayes(Matrix <float> inputData, Matrix <int> outputData, string modelName)
 {
     // BUG FIX: the original leaked the TrainData wrapper and rethrew with
     // "throw ee;", which resets the stack trace. The pointless catch-and-rethrow
     // is removed (exceptions still propagate, now with the original trace) and
     // both native wrappers are disposed deterministically.
     using (NormalBayesClassifier classifier = new NormalBayesClassifier())
     using (TrainData training = new TrainData(inputData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, outputData))
     {
         classifier.Train(training);
         String fileName = modelName + ".xml";
         classifier.Save(fileName);
     }
     return true;
 }
Example #9
0
 /// <summary>
 /// Copies the schedule fields from trainData onto this train, resets its
 /// run-time flags, caches the engine component for its engine id, and wires
 /// up the clock/signal event handlers that drive the train.
 /// </summary>
 /// <param name="trainData">Parsed timetable entry to build this train from.</param>
 public void MakeTrainFromData(TrainData trainData)
 {
     TrainNumber    = trainData.TrainNumber;
     TrainDirection = trainData.TrainDirection;
     intEngine      = trainData.Engine;
     Cars           = trainData.Cars;
     StartTime      = trainData.PrevStationTime;
     OverTime       = trainData.NextStationTime;
     isComplete     = false;
     IsActive       = false;
     CarsHolder     = FindObjectOfType <CarsHolder> ();
     //cache each train engine, but will use only trainEngine
     engine = CarsHolder.GetCar(intEngine).GetComponent <Engine> ();
     SetTrainProperties(this);
     // NOTE(review): these handlers are never unsubscribed in this method;
     // confirm the train's teardown path removes them, otherwise repeated
     // calls / destroyed trains leak event subscriptions.
     EventManager.onHourPassed         += TrainBuild;
     EventManager.onMinutePassed       += CheckTrain;
     EventManager.onTrainSignalChanged += TryToGo;
 }
Example #10
0
        static void Main(string[] args)
        {
            const int featureWindowSize     = /*24*/ 16;
            const int halfFeatureWindowSize = featureWindowSize / 2;
            string    imageMainPath         = args[0];

            Image <L8> fullTextImage = Image.Load <L8>(imageMainPath + @"Images\FS18800114.2.11-a2-427w-c32.png");
            Image <L8> completeA     = Image.Load <L8>(imageMainPath + @"\Images\FS18800114.2.11-a2-427w-c32\a\CompleteA.png");

            PakiraDecisionTreeGenerator pakiraGenerator = new PakiraDecisionTreeGenerator();
            TrainData trainData = new TrainData();

            L8   whitePixel         = new L8(255);
            L8   blackPixel         = new L8(0);
            L8   dontCarePixel      = new L8(128);
            byte dontCarePixelValue = dontCarePixel.PackedValue;

            byte[] imageCropPixelsData = new byte[featureWindowSize * featureWindowSize * Unsafe.SizeOf <L8>()];

            completeA.ProcessPixelRows(accessor =>
            {
                Span <byte> imageCropPixels = new Span <byte>(imageCropPixelsData);

                for (int y = 0; y < accessor.Height; y++)
                {
                    Span <L8> pixelRow = accessor.GetRowSpan(y);

                    for (int x = 0; x < pixelRow.Length; x++)
                    {
                        // Get a reference to the pixel at position x
                        ref L8 pixel = ref pixelRow[x];

                        if (pixel != dontCarePixel)
                        {
                            Image <L8> whiteWindow = new Image <L8>(featureWindowSize, featureWindowSize, whitePixel);
                            Image <L8> imageCrop   = whiteWindow.Clone(clone => clone.DrawImage(fullTextImage, new Point(halfFeatureWindowSize - x, halfFeatureWindowSize - y), 1));

                            imageCrop.CopyPixelDataTo(imageCropPixels);

                            trainData = trainData.AddSample(imageCropPixelsData.Select <byte, double>(s => s), pixel.PackedValue);
                        }
                    }
                }
            });
Example #11
0
        /// <summary>
        /// Verifies that a tree generated from three training samples has a root
        /// node and predicts each training sample's own label exactly.
        /// </summary>
        public void MinimumSampleCount()
        {
            PakiraDecisionTreeGenerator generator = PakiraGeneratorTests.CreatePakiraGeneratorInstance();

            // TrainData is immutable: AddSample returns a new instance, so the calls chain.
            TrainData trainData = new TrainData()
                                  .AddSample(ImmutableList.CreateRange(new double[] { 2, 3 }), 42)
                                  .AddSample(ImmutableList.CreateRange(new double[] { 20, 140 }), 54)
                                  .AddSample(ImmutableList.CreateRange(new double[] { 33, 200 }), 42);

            PakiraDecisionTreeModel model = new PakiraDecisionTreeModel(trainData.Samples[0]);
            model = generator.Generate(model, trainData);

            model.Tree.Root.ShouldNotBeNull();

            // Every training sample must be predicted with its own label.
            for (int i = 0; i < 3; i++)
            {
                model.PredictNode(trainData.Samples[i]).Value.ShouldBe(trainData.Labels[i]);
            }
        }
Example #12
0
        /// <summary>
        /// Creates and persists a TrainData summary record linking a train to an
        /// uploaded file, stamping it with the current time.
        /// </summary>
        /// <param name="trainID">Identifier of the train the file belongs to.</param>
        /// <param name="fileID">Uploaded file name; the segment after its first '.' is stored as TypeID.</param>
        /// <returns>The result of the underlying service insert.</returns>
        public int AddTrainSummary(string trainID, string fileID)
        {
            TrainData model    = new TrainData();
            var       filetype = fileID.Split('.');

            model.Create();
            model.FileID  = fileID;
            model.TrainID = trainID;
            // Use the array's Length property instead of LINQ Count() (idiom; avoids
            // an unnecessary extension-method call on a plain array).
            if (filetype.Length > 1)
            {
                // NOTE(review): this stores the segment after the FIRST dot, not the
                // final extension — for "report.v2.xlsx" that is "v2", not "xlsx".
                // Confirm whether filetype[filetype.Length - 1] was intended.
                model.TypeID = filetype[1];
            }

            model.UploadTime = DateTime.Now;
            model.DeleteMark = false;
            return service.Insert(model);
        }
Example #13
0
        /// <summary>
        /// Computes 36x36 HOG features for every positive and negative sample
        /// image, trains a linear C-SVC SVM on them and saves the model to the
        /// file named in txtFileName.
        /// </summary>
        private void btnTrain_Click(object sender, EventArgs e)
        {
            HOGDescriptor hog = new HOGDescriptor(new Size(36, 36), new Size(36, 36), new Size(6, 6), new Size(6, 6));

            // Positive samples -> class 1.
            fsPeg = Directory.GetFiles(txtPosPath.Text);
            for (int i = 0; i < fsPeg.Length; i++)
            {
                // BUG FIX: Directory.GetFiles already returns full paths; the original
                // prepended the directory a second time, producing invalid file names.
                // Images are also disposed now instead of leaking native memory.
                using (Image <Bgr, byte> vImage = new Image <Bgr, byte>(fsPeg[i]))
                using (Image <Gray, byte> vGray = vImage.Convert <Gray, byte>())
                {
                    float[] fAttr = hog.Compute(vGray);
                    for (int j = 0; j < fAttr.Length; j++)
                    {
                        DataMatrix[i, j] = fAttr[j];
                    }
                }
                AttrMatrix[i, 0] = 1;
            }

            // Negative samples -> class 0, stored AFTER the positive rows.
            fsNeg = Directory.GetFiles(txtNegPath.Text);
            for (int i = 0; i < fsNeg.Length; i++)
            {
                using (Image <Bgr, byte> vImage = new Image <Bgr, byte>(fsNeg[i]))
                using (Image <Gray, byte> vGray = vImage.Convert <Gray, byte>())
                {
                    float[] fAttr = hog.Compute(vGray);
                    // BUG FIX: the original wrote negatives to row i, overwriting the
                    // positive samples; offset by the positive count instead.
                    int row = fsPeg.Length + i;
                    for (int j = 0; j < fAttr.Length; j++)
                    {
                        DataMatrix[row, j] = fAttr[j];
                    }
                    AttrMatrix[row, 0] = 0;
                }
            }

            Emgu.CV.ML.SVM vSVM = new Emgu.CV.ML.SVM();
            vSVM.Type = Emgu.CV.ML.SVM.SvmType.CSvc;
            vSVM.SetKernel(Emgu.CV.ML.SVM.SvmKernelType.Linear);
            vSVM.TermCriteria = new MCvTermCriteria(1000, 0.1);
            TrainData td = new TrainData(DataMatrix, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, AttrMatrix);
            // BUG FIX: the original saved the SVM without ever training it.
            vSVM.Train(td);
            String    cExportFileName = txtFileName.Text;
            vSVM.Save(cExportFileName);
        }
    /// <summary>
    /// Loads the train table text asset at _Filepath, parses one TrainData per
    /// non-empty line, and stores each in m_TrainData keyed by its ID.
    /// Logs an error and returns early if the asset cannot be loaded.
    /// </summary>
    private void LoadTrainTable(string _Filepath)
    {
        Debug.Log("LoadTrainTable() start. _Filepath=" + _Filepath);

        DestroyTrainData();

        TextAsset asset = (TextAsset)Resources.Load(_Filepath,
                                                    typeof(TextAsset));
        if (null == asset)
        {
            Debug.LogError("LoadTrainTable() _Filepath load failed=" + _Filepath);
            return;
        }

        // Split on any newline convention (CRLF, LF, or lone CR).
        string[] separators = { "\r\n", "\n", "\r" };
        string[] lines = asset.text.Split(separators,
                                          System.StringSplitOptions.None);

        foreach (string line in lines)
        {
            if (line.Length > 0)
            {
                TrainData parsed = new TrainData();
                parsed.ParseFromString(line);
                m_TrainData.Add(parsed.ID, parsed);
                Debug.Log("m_TrainData.Add=" + parsed.DisplayName);
            }
        }
        Debug.Log("LoadTrainTable() end.");
    }    // LoadTrainTable
    /// <summary>
    /// Swaps the contents of timetable entries _i and _j for the train currently
    /// selected via m_SetRouteTrainID; no-op if that train id is unknown or
    /// either index is out of range.
    /// </summary>
    private void SwapRouteData(int _i, int _j)
    {
        TrainData setRouteTrain = null;

        if (false == m_LevelGeneratorStaticPtr.m_TrainData.ContainsKey(m_SetRouteTrainID))
        {
            return;
        }
        setRouteTrain = m_LevelGeneratorStaticPtr.m_TrainData[m_SetRouteTrainID];

        TimeTableStruct timeStation_tmp = new TimeTableStruct();

        // NOTE(review): ToArray() copies the list into a new array. The swap below
        // only reaches m_TimeTable if TimeTableStruct is a reference type (CopyFrom
        // then mutates the shared instances). If it is a value type, this method
        // silently modifies the throwaway array copy only — confirm.
        TimeTableStruct[] timeTable = setRouteTrain.m_TimeTable.ToArray();
        if (_i < timeTable.Length &&
            _j < timeTable.Length)
        {
            // Three-way swap via the temp entry.
            timeStation_tmp.CopyFrom(timeTable[_j]);
            timeTable[_j].CopyFrom(timeTable[_i]);
            timeTable[_i].CopyFrom(timeStation_tmp);
        }
    }
Example #16
0
 /// <summary>
 /// Flattens a TrainData instance into an AnalyzedTrainData view, converting
 /// station references to display strings via ToStationString.
 /// </summary>
 /// <param name="trainData">Source train record to analyze.</param>
 /// <returns>A new AnalyzedTrainData populated from trainData.</returns>
 public static AnalyzedTrainData FromTrainData(TrainData trainData)
 {
     // Hoist the two nested objects read repeatedly below.
     var name = trainData.Name;
     var condition = trainData.Condition;

     return new AnalyzedTrainData()
     {
         TrainTypes = name.TrainType,
         Name = name.Name,
         Number = name.Number,
         SubTrainTypes = name.SubTrainType,
         SubTypeStart = ToStationString(name.SubTrainTypeRange?.StartPos),
         SubTypeEnd = ToStationString(name.SubTrainTypeRange?.EndPos),
         Time = trainData.Time,
         DepartureStation = ToStationString(trainData.DepartureStation),
         ArrivalType = trainData.ArrivalType,
         Destination = ToStationString(trainData.Destination),
         TrainCondition = condition.Condition,
         SuspendRangeStart = ToStationString(condition.SuspendRange?.StartPos),
         SuspendRangeEnd = ToStationString(condition.SuspendRange?.EndPos),
         DelayTimeMin = condition.DelayTimeMin,
         DelayTimeMax = condition.DelayTimeMax,
     };
 }
Example #17
0
        /// <summary>
        /// Splits the records into TrainData/TestData year by year: for each year,
        /// a random TEST_DATA_COUNT fraction of that year's records goes to the
        /// test set and the remainder to the training set.
        /// </summary>
        /// <param name="allData">All meteorological records to partition.</param>
        /// <param name="s">Season this data set represents.</param>
        public MeteoDataSet(List <MeteoDataRecord> allData, Season s)
        {
            this.Season = s;

            int minYear = allData.Min(g => g.Output.Date.Year);
            int maxYear = allData.Max(g => g.Output.Date.Year);

            // BUG FIX: use a single RNG for all years. The original constructed a
            // time-seeded Random inside the loop, so consecutive years could get
            // identical seeds and therefore identical draw sequences.
            Random rand = new Random();

            for (int year = minYear; year <= maxYear; year++)
            {
                var yearRecords   = allData.Where(g => g.Output.Date.Year == year);
                int yearDataCount = yearRecords.Count();
                int testDataSize  = (int)(TEST_DATA_COUNT * yearDataCount);

                int idx = rand.Next(0, yearDataCount);

                // Draw distinct random indexes for this year's test subset.
                List <int> drawIndexes = new List <int>();
                while (drawIndexes.Count < testDataSize)
                {
                    if (!drawIndexes.Contains(idx))
                    {
                        drawIndexes.Add(idx);
                    }
                    idx = rand.Next(0, yearDataCount);
                }

                int s1 = TrainData.Count;
                int s2 = TestData.Count;

                TrainData.AddRange(yearRecords.Where((g, i) => !drawIndexes.Contains(i)));
                TestData.AddRange(yearRecords.Where((g, i) => drawIndexes.Contains(i)));

                // Sanity check: every record of the year must land in exactly one set.
                if ((TestData.Count - s2) + (TrainData.Count - s1) != yearDataCount)
                {
                    throw new Exception("Error during drawing");
                }
            }
        }
Example #18
0
        /// <summary>
        /// Loads feature records from the given JSON report file, trains an
        /// intersection-kernel C-SVC SVM on them via TrainAuto, saves the model to
        /// "svm.xml" and runs one test prediction. Best-effort: failures are
        /// written to stderr but never thrown.
        /// </summary>
        /// <param name="fn">Path to the JSON report file.</param>
        static void train(string fn)
        {
            try
            {
                var jss = new System.Web.Script.Serialization.JavaScriptSerializer();
                List <Dictionary <string, object> > datas = jss.Deserialize <List <Dictionary <string, object> > >(System.IO.File.ReadAllText(fn));
                string[]       keys = testMQ.Properties.Resources.keys.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
                Matrix <float> data;
                Matrix <int>   response;
                load_data(datas.ToArray(), keys, out data, out response);
                using (SVM model = new SVM())
                {
                    model.SetKernel(SVM.SvmKernelType.Inter);
                    model.Type         = SVM.SvmType.CSvc;
                    model.C            = 1;
                    model.TermCriteria = new Emgu.CV.Structure.MCvTermCriteria(100, 0.00001);

                    // BUG FIX: dispose the native TrainData wrapper (the original leaked it).
                    using (TrainData td = new TrainData(data, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response))
                    {
                        bool ok = model.TrainAuto(td, 3);
                        if (ok)
                        {
                            model.Save("svm.xml");
                            Matrix <float> sample;
                            load_test_data(datas.ToArray(), keys, out sample);
                            float r = model.Predict(sample);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // Keep the original "never throw" contract, but don't swallow the
                // failure silently (the original had an empty catch block).
                Console.Error.WriteLine("train() failed: " + ex);
            }
        }
        /// <summary>
        /// Rebuilds TrainData with samples of Function over [Min, Max] (step Step),
        /// then rescales every expected output via Normalize against the observed
        /// min/max, and fills TestData with an equal number of random points
        /// normalized the same way.
        /// </summary>
        private void InitializeData()
        {
            TrainData.Clear();
            var lowest  = double.MaxValue;
            var highest = double.MinValue;

            // First pass: sample the function while tracking its output range.
            for (var x = Min; x <= Max; x += Step)
            {
                var target = Function(x);
                highest = target > highest ? target : highest;
                lowest  = target < lowest ? target : lowest;

                TrainData.Add(new TrainingData {
                    Input = new Array3D(Normalize(x, Min, Max)), Expected = new Array3D(target)
                });
            }

            // Second pass: rescale expected outputs against the observed range.
            foreach (var sample in TrainData)
            {
                sample.Expected[0] = Normalize(sample.Expected[0], lowest, highest);
            }

            // Random test points drawn uniformly from the same interval.
            TestData.Clear();
            for (var n = 0; n < TrainData.Count; n++)
            {
                var x = Rand.NextDouble() * (Max - Min) + Min;
                TestData.Add(new TrainingData {
                    Input = new Array3D(Normalize(x, Min, Max)), Expected = new Array3D(Normalize(Function(x), lowest, highest))
                });
            }
        }
Example #20
0
        /// <summary>
        /// Trains the decision stump: for each feature dimension, finds the best
        /// single-dimension stump under the given sample weights, then keeps the
        /// stump that sorts first among all dimensions and stores it in _stump.
        /// </summary>
        /// <param name="data">Training data; must be a SortedData instance.</param>
        /// <param name="weight">Per-sample weights, indexed by original sample order.</param>
        /// <returns>The Pm value of the selected stump.</returns>
        public double Train(TrainData data, double[] weight)
        {
            SortedData sorted = data as SortedData;
            int sampleCount = sorted.N;
            int dimCount = sorted.MaxDim;
            List <Stump> candidates = new List <Stump>(dimCount);

            double[] reorderedWeight = new double[sorted.N];
            for (int dim = 1; dim <= dimCount; dim++)
            {
                SortedNode[] column = sorted[dim];
                // Permute the weights to match this dimension's sort order.
                for (int n = 0; n < sampleCount; n++)
                {
                    reorderedWeight[n] = weight[column[n].N];
                }
                // Best stump restricted to this single dimension.
                candidates.Add(OptimalOneDim(column, reorderedWeight, sampleCount, dim));
            }
            // Stump's comparison order determines the winner: first after sorting.
            candidates.Sort();
            _stump = candidates[0];
            return candidates[0].Pm;
        }
Example #21
0
        /// <summary>
        /// Builds and trains a back-propagation MLP with one hidden layer of
        /// <paramref name="hiddenLayers"/> neurons and a single output, then saves
        /// the trained model to "{modelName}.xml".
        /// </summary>
        /// <param name="inputData">Feature matrix, one sample per row; its column count sets the input layer size.</param>
        /// <param name="outputData">Target value for each sample row.</param>
        /// <param name="modelName">Base name of the saved model file (".xml" is appended).</param>
        /// <param name="iteration">Maximum training iterations (termination criteria).</param>
        /// <param name="learningRate">Epsilon for the termination criteria.</param>
        /// <param name="hiddenLayers">Number of neurons in the single hidden layer.</param>
        /// <param name="activationType">Activation function applied to the network.</param>
        /// <param name="backpropWeightScale">Back-propagation weight scale.</param>
        /// <param name="backpropMomentumScale">Back-propagation momentum scale.</param>
        /// <returns>True on success; false if any exception occurred (the exception is swallowed).</returns>
        public bool trainingMLP(Matrix <float> inputData, Matrix <float> outputData, string modelName, int iteration = 1000, double learningRate = 0.01, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym, double backpropWeightScale = 0.1, double backpropMomentumScale = 0.2)
        {
            try
            {
                layerSize = new Matrix <int>(new int[] { inputData.Cols, hiddenLayers, 1 });// Integer vector specifying the number of neurons in each layer including the input and output layers. The very first element specifies the number of elements in the input layer. The last element - number of elements in the output layer.

                IInputArray sample_in = inputData;
                IInputArray response  = outputData;



                //===========================================================
                using (ANN_MLP network = new ANN_MLP())
                {
                    network.SetActivationFunction(activationType);
                    network.SetLayerSizes(layerSize);
                    network.TermCriteria = new MCvTermCriteria(iteration, learningRate); // Number of Iteration for training
                    network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop);
                    network.BackpropWeightScale   = backpropWeightScale;
                    network.BackpropMomentumScale = backpropMomentumScale;

                    //network.Save("tmp.xml"); // Save temp weights to file for correction before training

                    // NOTE(review): Read() below expects "tmp.xml" to exist on disk, but
                    // the Save call above is commented out — presumably
                    // ActivationFunctionHardFix writes that file; if not, Read throws
                    // and this method returns false. Confirm.
                    ActivationFunctionHardFix(network);                                                                  // Fix min max values
                    network.Read(new FileStorage("tmp.xml", FileStorage.Mode.Read).GetFirstTopLevelNode());              // Read Fixed values for training
                    TrainData training = new TrainData(sample_in, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response); // Creating training data

                    // NOTE(review): TrainData and FileStorage are disposable native
                    // wrappers that are not disposed here.
                    network.Train(training);                                                                             // Start Training
                    network.Save(modelName + ".xml");
                }
                return(true);
            }
            catch (Exception ee)
            {
                // NOTE(review): all failures are reported only as `false`; the
                // exception detail (ee) is discarded without logging.
                return(false);
            }
        }
        /// <summary>
        /// Trains an SVM (via cross-validated TrainAuto) using bio-inspired features
        /// and saves the trained model to disk.
        /// </summary>
        /// <param name="trainData"> Feature vectors for training, one row per sample. </param>
        /// <param name="trainClasses"> Class labels, one per training row. </param>
        /// <param name="saveModelName"> File name the trained model is saved under. </param>
        /// <param name="svm_type"> SVM formulation type. </param>
        /// <param name="kFold"> Number of cross-validation folds used by TrainAuto. </param>
        /// <param name="C"> Cost. </param>
        /// <param name="coef0"> Coeff. </param>
        /// <param name="degree"> Degree. </param>
        /// <param name="eps"> Eps for termination criteria. </param>
        /// <param name="gamma"> Gamma. </param>
        /// <param name="kernel_type"> Kernel type of the svm (currently overridden to Linear — see note in the body). </param>
        /// <param name="nu"> Nu. </param>
        /// <param name="maxIter"> Maximum number of iterations. </param>
        /// <param name="termCritType"> Termination criteria type. </param>
        /// <returns> True when training completed and the model was saved. </returns>
        public bool TrainSVM(Matrix <float> trainData, Matrix <int> trainClasses, string saveModelName, SVM.SvmType svm_type = SVM.SvmType.CSvc, int kFold = 10, double C = 1.0, double coef0 = 0.1, int degree = 3, double eps = 0.001, double gamma = 1.0, SVM.SvmKernelType kernel_type = SVM.SvmKernelType.Rbf, double nu = 0.5, int maxIter = 500, Emgu.CV.CvEnum.TermCritType termCritType = Emgu.CV.CvEnum.TermCritType.Eps)
        {
            var svmModel     = new SVM();
            var termCriteria = new Emgu.CV.Structure.MCvTermCriteria();

            svmModel.C            = C;
            svmModel.Coef0        = coef0;
            svmModel.Degree       = degree;
            svmModel.Gamma        = gamma;
            svmModel.Nu           = nu;
            svmModel.Type         = svm_type;
            termCriteria.Epsilon  = eps;
            termCriteria.MaxIter  = maxIter;
            termCriteria.Type     = termCritType;
            svmModel.TermCriteria = termCriteria;
            svmModel.P            = 1;
            // NOTE(review): the caller-supplied kernel_type is deliberately overridden
            // here (linear kernel for CNN feature data); confirm callers do not rely
            // on passing a different kernel.
            kernel_type           = SVM.SvmKernelType.Linear; //for training of CNN feature data
            svmModel.SetKernel(kernel_type);
            bool trained;

            try
            {
                using (svmModel)
                {
                    TrainData td = new TrainData(trainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, trainClasses);
                    trained = svmModel.TrainAuto(td, kFold); // k-fold cross-validated parameter search
                    svmModel.Save(saveModelName);
                }
            }
            catch (Exception)
            {
                // BUG FIX: was "throw esvm;", which resets the stack trace.
                throw;
            }
            return(trained);
        }
        /// <summary>
        /// Re-partitions RawData into TestData and TrainData: each sample lands in
        /// the test set with probability TestRatio. Guarantees the test set is
        /// non-empty by moving one training sample over if needed.
        /// </summary>
        public void SuffleData()
        {
            TestData.Clear();
            TrainData.Clear();

            // Randomly route every raw sample to one of the two sets.
            foreach (var pair in RawData)
            {
                bool goesToTest = Random.R.NextDouble(0, 1) < TestRatio;
                if (goesToTest)
                {
                    TestData.Add(pair.Key, pair.Value);
                }
                else
                {
                    TrainData.Add(pair.Key, pair.Value);
                }
            }

            // Edge case: nothing was routed to the test set — move one sample across.
            if (TestData.Count == 0)
            {
                var moved = TrainData.Last();
                TestData.Add(moved.Key, moved.Value);
                TrainData.Remove(moved.Key);
            }
        }
Example #24
0
        /// <summary>
        /// Verifies that chaining many data transformers (one mean-distance followed
        /// by 100 pass-throughs) still lets the generator produce a very shallow tree
        /// that classifies all three training samples correctly.
        /// </summary>
        public void DataTransformersQuickExit()
        {
            // Build a tiny three-sample training set.
            PakiraDecisionTreeGenerator generator = PakiraGeneratorTests.CreatePakiraGeneratorInstance();
            TrainData samples = new TrainData();

            samples = samples.AddSample(ImmutableList.CreateRange(new double[] { 25, 35 }), 42);
            samples = samples.AddSample(ImmutableList.CreateRange(new double[] { 120, 140 }), 54);
            samples = samples.AddSample(ImmutableList.CreateRange(new double[] { 190, 200 }), 42);

            PassThroughTransformer      passThrough  = new PassThroughTransformer();
            MeanDistanceDataTransformer meanDistance = new MeanDistanceDataTransformer();

            // Chain: one mean-distance transform, then 100 pass-through transforms.
            Converter <IEnumerable <double>, IEnumerable <double> > transformerChain = null;

            transformerChain += meanDistance.ConvertAll;

            for (int count = 0; count < 100; count++)
            {
                transformerChain += passThrough.ConvertAll;
            }

            PakiraDecisionTreeModel model = new PakiraDecisionTreeModel(transformerChain, samples.Samples[0]);

            generator.MinimumSampleCount = 250;

            model = generator.Generate(model, samples);

            model.Tree.Root.ShouldNotBeNull();

            // Each training sample must be predicted with its own label.
            model.PredictNode(samples.Samples[0]).Value.ShouldBe(samples.Labels[0]);
            model.PredictNode(samples.Samples[1]).Value.ShouldBe(samples.Labels[1]);
            model.PredictNode(samples.Samples[2]).Value.ShouldBe(samples.Labels[2]);

            // The data transformers should allow to produce a very shallow tree
            model.Tree.GetNodes().Count().ShouldBeInRange(3, 7);
        }
Example #25
0
        /// <summary>
        /// Smoke-test for NormalBayesClassifier: trains on three synthetic Gaussian
        /// clusters, round-trips the model through a temp file, classifies every
        /// pixel of a 500x500 image, then draws the training samples on top.
        /// </summary>
        public void TestNormalBayesClassifier()
        {
            // One display colour per class (classes are labelled 1..3 below).
            Bgr[] colors = new Bgr[]
            {
                new Bgr(0, 0, 255),
                new Bgr(0, 255, 0),
                new Bgr(255, 0, 0)
            };
            int trainSampleCount = 150;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <int>   trainClasses = new Matrix <int>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample = new Matrix <float>(1, 2);

            // Class 1 (first third of rows): col 0 ~ N(100, 50), col 1 ~ N(300, 50).
            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(50));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            // Class 2 (middle third): both columns ~ N(400, 50).
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(50));

            // Class 3 (last third): col 0 ~ N(300, 50), col 1 ~ N(100, 50).
            Matrix <float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(50));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(50));

            // Labels 1/2/3 aligned with the row ranges above.
            Matrix <int> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix <int> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix <int> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
                using (NormalBayesClassifier classifier = new NormalBayesClassifier())
                {
                    //ParamDef[] defs = classifier.GetParams();
                    // Train twice on purpose: once from raw matrices, once from TrainData,
                    // exercising both Train overloads.
                    classifier.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
                    classifier.Clear();
                    classifier.Train(td);
#if !NETFX_CORE
                    // Exercise Save(); the file is deleted right away.
                    String fileName = Path.Combine(Path.GetTempPath(), "normalBayes.xml");
                    classifier.Save(fileName);
                    if (File.Exists(fileName))
                    {
                        File.Delete(fileName);
                    }
#endif

                    #region Classify every image pixel

                    for (int i = 0; i < img.Height; i++)
                    {
                        for (int j = 0; j < img.Width; j++)
                        {
                            sample.Data[0, 0] = i;
                            sample.Data[0, 1] = j;
                            int response = (int)classifier.Predict(sample, null);

                            // Predicted classes are 1-based; map back into the colour table.
                            Bgr color = colors[response - 1];

                            // NOTE(review): the image is indexed [j, i] while the sample was
                            // built from (i, j) — looks transposed; confirm this is intended.
                            img[j, i] = new Bgr(color.Blue * 0.5, color.Green * 0.5, color.Red * 0.5);
                        }
                    }

                    #endregion
                }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 3); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), colors[0], -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), colors[1], -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), colors[2], -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #26
0
        /// <summary>
        /// Smoke-test for SVM: trains a C-SVC on three synthetic Gaussian clusters,
        /// round-trips the model through XML, classifies every image pixel and
        /// copies the support vectors out through a pinned buffer.
        /// </summary>
        public void TestSVM()
        {
            int trainSampleCount = 150;
            int sigma            = 60; // standard deviation shared by all three clusters

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample = new Matrix <float>(1, 2);

            // Class 1 (first third of rows): col 0 ~ N(100, sigma), col 1 ~ N(300, sigma).
            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            // Class 2 (middle third): both columns ~ N(400, sigma).
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            // Class 3 (last third): col 0 ~ N(300, sigma), col 1 ~ N(100, sigma).
            Matrix <float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            // Labels 1/2/3 aligned with the row ranges above.
            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix <float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            //using (SVM.Params p = new SVM.Params(MlEnum.SvmType.CSvc, MlEnum.SvmKernelType.Linear, 0, 1, 0, 1, 0, 0, null, new MCvTermCriteria(100, 1.0e-6)))
            using (SVM model = new SVM())
                using (Matrix <int> trainClassesInt = trainClasses.Convert <int>())
                    using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClassesInt))
                    {
                        // C-SVC with the histogram-intersection kernel and fixed parameters.
                        model.Type = SVM.SvmType.CSvc;
                        model.SetKernel(SVM.SvmKernelType.Inter);
                        model.Degree       = 0;
                        model.Gamma        = 1;
                        model.Coef0        = 0;
                        model.C            = 1;
                        model.Nu           = 0;
                        model.P            = 0;
                        model.TermCriteria = new MCvTermCriteria(100, 1.0e-6);
                        //bool trained = model.TrainAuto(td, 5);
                        model.Train(td);
#if !NETFX_CORE
                        // Save and re-read the model to exercise XML (de)serialization.
                        String fileName = "svmModel.xml";
                        //String fileName = Path.Combine(Path.GetTempPath(), "svmModel.xml");
                        model.Save(fileName);

                        SVM         model2 = new SVM();
                        FileStorage fs     = new FileStorage(fileName, FileStorage.Mode.Read);
                        model2.Read(fs.GetFirstTopLevelNode());

                        if (File.Exists(fileName))
                        {
                            File.Delete(fileName);
                        }
#endif

                        // Colour every pixel by its predicted class (1, 2 or other).
                        for (int i = 0; i < img.Height; i++)
                        {
                            for (int j = 0; j < img.Width; j++)
                            {
                                sample.Data[0, 0] = j;
                                sample.Data[0, 1] = i;

                                float response = model.Predict(sample);

                                img[i, j] =
                                    response == 1
                                ? new Bgr(90, 0, 0)
                                : response == 2
                                    ? new Bgr(0, 90, 0)
                                    : new Bgr(0, 0, 90);
                            }
                        }
                        // Copy the support vectors out through a pinned managed array.
                        // NOTE(review): 'pts', 'vectors' and 'supportVec' are never used
                        // afterwards — dead code kept as-is; confirm before removing.
                        Mat supportVectors = model.GetSupportVectors();
                        //TODO: find out how to draw the support vectors
                        Image <Gray, float> pts     = supportVectors.ToImage <Gray, float>();
                        PointF[]            vectors = new PointF[supportVectors.Rows];
                        GCHandle            handler = GCHandle.Alloc(vectors, GCHandleType.Pinned);
                        using (
                            Mat vMat = new Mat(supportVectors.Rows, supportVectors.Cols, DepthType.Cv32F, 1,
                                               handler.AddrOfPinnedObject(), supportVectors.Cols * 4))
                        {
                            supportVectors.CopyTo(vMat);
                        }
                        handler.Free();


                        Mat supportVec = model.GetSupportVectors();

                        /*
                         * for (int i = 0; i < c; i++)
                         * {
                         * float[] v = model.GetSupportVector(i);
                         * PointF p1 = new PointF(v[0], v[1]);
                         * img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
                         * }*/
                    }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 3); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
    /// <summary>
    /// Swaps the contents of the trains at enumeration positions _i and _j of
    /// m_LevelGeneratorStaticPtr.m_TrainData. Does nothing when either index is
    /// out of range (the corresponding entry stays null).
    /// </summary>
    private void SwapTrainData( int _i , int _j )
    {
        int iter = 0 ;
        TrainData td_i = null ;
        TrainData td_j = null ;
        TrainData td_tmp = new TrainData() ;
        foreach( TrainData td in m_LevelGeneratorStaticPtr.m_TrainData.Values )
        {
            if( iter == _i )
            {
                td_i = td ;
            }
            else if( iter == _j )
            {
                td_j = td ;
            }
            // Stop scanning once both targets have been found (was a full scan).
            if( null != td_i &&
                null != td_j )
            {
                break ;
            }
            ++iter ;
        }

        if( null != td_i &&
            null != td_j )
        {
            // Three-way swap of the payloads; the IDs stay attached to their slots.
            td_tmp.CopyFrom( td_j ) ;
            td_j.CopyFrom( td_i ) ;
            td_i.CopyFrom( td_tmp ) ;
        }
    }
    /// <summary>
    /// Draws the train-data editor GUI: an insert row, paging controls, and one
    /// editable row per visible train with rename / reorder / remove / set-routes
    /// buttons.
    /// </summary>
    private void DrawEditor_TrainsData()
    {
        GUILayout.Label( "Insert" ) ;
        GUILayout.BeginHorizontal() ;

        string inputTrainLabel = "" ;
        DrawTrainData( ref inputTrainLabel ) ;
        if( true == GUILayout.Button( "Insert" ) &&
            0 != inputTrainLabel.Length )
        {
            TrainData newTD = new TrainData() ;
            newTD.ID = m_LevelGeneratorStaticPtr.GetANewTrainID() ;
            newTD.DisplayName = inputTrainLabel ;

            Debug.Log( "newTD.ID" +  newTD.ID ) ; // BUG FIX: message said "newSD.ID"
            m_LevelGeneratorStaticPtr.m_TrainData.Add( newTD.ID , newTD ) ;
            m_LevelGeneratorStaticPtr.ReCreateTrainDisplayByData() ;
        }
        GUILayout.EndHorizontal() ;

        GUILayout.BeginHorizontal() ;
        GUILayout.Label( "Train Table" ) ;
        // BUG FIX: this label displayed m_StationStartIndex (the station page index)
        // instead of the train page index used by the buttons below.
        GUILayout.Label( "Page Start Index:" + m_TrainStartIndex ) ;
        if( true == GUILayout.Button( "Previous Page" ) )
        {
            m_TrainStartIndex -= m_TrainNumInPage ;
            if( m_TrainStartIndex < 0 )
                m_TrainStartIndex = 0 ;
        }
        if( true == GUILayout.Button( "Next Page" ) )
        {
            m_TrainStartIndex += m_TrainNumInPage ;
            if( m_TrainStartIndex >= m_LevelGeneratorStaticPtr.m_TrainData.Count )
                m_TrainStartIndex -= m_TrainNumInPage ;
        }
        GUILayout.EndHorizontal() ;

        // Draw one editable row per train on the current page. Every structural
        // mutation returns immediately: continuing would enumerate a collection
        // that was just modified.
        int i = 0 ;
        foreach( TrainData td in m_LevelGeneratorStaticPtr.m_TrainData.Values )
        {
            if( i < m_TrainStartIndex || i >= m_TrainStartIndex + m_TrainNumInPage )
            {
                ++i ;
                continue ;
            }

            GUILayout.BeginHorizontal() ;

            string PreDisplayName = td.DisplayName ;

            DrawTrainData( ref PreDisplayName ) ;

            td.DisplayName = PreDisplayName ;

            if( true == GUILayout.Button( "MoveUp" ) )
            {
                SwapTrainData( i , i-1 ) ;
                return ;// end this round
            }
            if( true == GUILayout.Button( "MoveDown" ) )
            {
                SwapTrainData( i , i+1 ) ;
                return ;// end this round
            }
            if( true == GUILayout.Button( "Remove" ) )
            {
                m_LevelGeneratorStaticPtr.RemoveTrain( td.ID ) ;
                return ;// end this round
            }
            if( true == GUILayout.Button( "SetRoutes" ) )
            {
                m_SetRouteTrainID = td.ID ;
                m_DisplayRouteOfTrain = true ;
                m_DisplayTrainData = false ;
                return ;// end this round
            }
            GUILayout.EndHorizontal() ;
            ++i ;
        }
    }
Example #29
0
        /// <summary>
        /// AdaBoost-style training: builds 'iter' weak learners of the named type,
        /// re-weighting the samples after each round.
        /// </summary>
        /// <param name="prob"> Training problem; Y labels are used as signed values (sign of result*Y decides correctness below). </param>
        /// <param name="weakLearnerName"> Type name of the weak learner, resolved inside the WeakLearner namespace. </param>
        /// <param name="weakLearnerArgs"> Learner-specific option strings. </param>
        /// <param name="iter"> Number of boosting rounds. </param>
        /// <exception cref="Exception"> Thrown when a weak learner is no better than chance (Pm >= 0.5). </exception>
        public void Train(Problem prob, string weakLearnerName, string[] weakLearnerArgs, int iter)
        {
            // Create the prototype weak learner and its training data.
            Assembly    asm        = Assembly.GetAssembly(typeof(WeakLearner));
            WeakLearner srcLearner = (WeakLearner)asm.CreateInstance(typeof(WeakLearner).Namespace + "." + weakLearnerName, true);

            srcLearner.InitLearningOptions(weakLearnerArgs);
            TrainData traindata = srcLearner.CreateTrainData(prob);

            // Smoothing value 1/N avoids numerical problems
            // ("Improved boosting algorithms using confidence-rated predictions", ch. 4.2).
            double smoothingVal = 1.0 / traindata.N;

            // Initialise uniform sample weights.
            double[] weight = new double[traindata.N];
            for (int t = 0; t < weight.Length; t++)
            {
                weight[t] = smoothingVal;
            }

            // Progress banner; the iteration line below is rewritten in place.
            Console.WriteLine("\tStrongLearner:{0}", this.GetType().Name);
            Console.WriteLine("\tWeakLearner:{0}", weakLearnerName);
            int cursorX = Console.CursorLeft;
            int cursorY = Console.CursorTop;

            // Boosting rounds.
            _weakLearners = new SortedList <int, WeakLearner>(iter);
            for (int t = 0; t < iter; t++)
            {
                // Fresh learner of the same concrete type as the prototype.
                WeakLearner subLearner = (WeakLearner)asm.CreateInstance(srcLearner.GetType().FullName);//srcLearner.CreateSubLearner();

                subLearner.InitLearningOptions(weakLearnerArgs);

                // Train against the current weight distribution; Pm is the weighted error.
                double Pm = subLearner.Train(traindata, weight);
                if (Pm >= 0.5)
                {
                    // No better than chance — boosting cannot continue.
                    throw new Exception(Messege.CouldNotClassify);
                }

                // Compute Alpha from the weighted correct/incorrect mass:
                // eps_min = weight of misclassified samples, eps_pls = weight of correct ones.
                double   eps_min = 0.0, eps_pls = 0.0;
                double[] result = new double[prob.N];
                for (int n = 0; n < prob.N; n++)
                {
                    result[n] = subLearner.Classify(prob.X[n]);
                    if ((result[n] * prob.Y[n]) < 0)
                    {
                        eps_min += weight[n];
                    }
                    if ((result[n] * prob.Y[n]) > 0)
                    {
                        eps_pls += weight[n];
                    }
                }

                double Alpha = 0.5 * Math.Log((eps_pls + smoothingVal) / (eps_min + smoothingVal));
                subLearner.Alpha = Alpha;

                // Re-weight the samples and normalise so the weights sum to 1.
                double Z = 0;
                for (int n = 0; n < prob.N; n++)
                {
                    weight[n] = weight[n] * Math.Exp(-1 * prob.Y[n] * result[n] * Alpha);
                    Z        += weight[n];
                }
                for (int n = 0; n < prob.N; n++)
                {
                    weight[n] /= Z;
                }

                // (Removed dead code: a "//test" loop that summed the weights into an
                // unused local.)

                // Keep this round's learner.
                _weakLearners.Add(t, subLearner);

                Console.SetCursorPosition(cursorX, cursorY);
                Console.WriteLine("\titerations {0}/{1}", t + 1, iter);
            }
        }
Example #30
0
 /// <summary>
 /// Copies the display data and timetable from _Src into this train while
 /// keeping this train's own ID.
 /// NOTE(review): m_TimeTable is assigned by reference (shallow copy) — after
 /// this call both trains share the same timetable object; confirm intended,
 /// especially for the three-way swap in SwapTrainData.
 /// </summary>
 public void CopyFrom( TrainData _Src )
 {
     Setup( this.ID ,
            _Src.m_DisplayName ) ;
     m_TimeTable = _Src.m_TimeTable ;
 }
        /// <summary>
        /// Extracts HOG descriptors for the 16 frames centred on the middle of
        /// [first, last], PCA-projects them, incrementally (re)trains the ANN stored
        /// in abc.xml, predicts one sample, then speaks and displays the result.
        /// NOTE(review): async void — exceptions thrown here are unobservable; kept
        /// because this appears to be used as a fire-and-forget UI handler.
        /// </summary>
        private async void moduleFeatureExtraction(int first, int last)
        {
            string recognisedText = "";

            double[,] RawData = new double[16, 3780]; // 16 frames x HOG descriptor length
            int mid  = (first + last) / 2;
            int low  = mid - 8; // BUG FIX: stray double semicolon removed
            int high = mid + 8;

            // Build a one-hot target row for the current class index 'adasas'.
            for (int i = 0; i < 16; i++)
            {
                for (int j = 0; j < 26; j++)
                {
                    if (j == adasas)
                    {
                        response[i, j] = 1;
                    }
                    if (j != adasas)
                    {
                        response[i, j] = 0;
                    }
                }
            }
            adasas++;
            // Clamp the 16-frame window to the available [first, last] range.
            if (low < first)
            {
                low++;
            }
            if (high > last)
            {
                high--; // BUG FIX: was 'low++', which shrank the window from the wrong end
            }

            for (int k = (low); k < (high); k++)
            {
                string            frameName             = "gesture//" + k + ".jpeg";
                Image <Bgr, byte> featurExtractionInput = new Image <Bgr, byte>(frameName);
                //pictureBox3.Image = featurExtractionInput.Bitmap;
                //label4.Text = k.ToString();
                await Task.Delay(1000 / Convert.ToInt32(2));

                // HOG descriptor for this frame (the redundant pre-allocation was removed).
                float[] desc = GetVector(featurExtractionInput);

                int i = k - (low);
                for (int j = 0; j < 3780; j++)
                {
                    double val = Convert.ToDouble(desc[j]);
                    RawData.SetValue(val, i, j);
                }

                // After the last frame of the window: project, train and predict.
                if (k == (high - 1))
                {
                    Matrix <Double> DataMatrix   = new Matrix <Double>(RawData);
                    Matrix <Double> Mean         = new Matrix <Double>(1, 3780);
                    Matrix <Double> EigenValues  = new Matrix <Double>(1, 3780);
                    Matrix <Double> EigenVectors = new Matrix <Double>(3780, 3780);
                    CvInvoke.PCACompute(DataMatrix, Mean, EigenVectors, 16);
                    Matrix <Double> result = new Matrix <Double>(16, 16);
                    CvInvoke.PCAProject(DataMatrix, Mean, EigenVectors, result);

                    // Serialize the projected matrix to test.xml and read it back.
                    String        filePath = @"test.xml";
                    StringBuilder sb       = new StringBuilder();
                    (new XmlSerializer(typeof(Matrix <double>))).Serialize(new StringWriter(sb), result);
                    XmlDocument xDoc = new XmlDocument();
                    xDoc.LoadXml(sb.ToString());

                    System.IO.File.WriteAllText(filePath, sb.ToString());
                    Matrix <double> matrix = (Matrix <double>)(new XmlSerializer(typeof(Matrix <double>))).Deserialize(new XmlNodeReader(xDoc));

                    // Append a textual dump of the matrix to the g.txt contents.
                    string djf = null;
                    djf  = System.IO.File.ReadAllText(@"g.txt");
                    djf += Environment.NewLine;
                    djf += Environment.NewLine;
                    for (int p = 0; p < 16; p++)
                    {
                        for (int q = 0; q < 16; q++)
                        {
                            djf += p + " , " + q + "  " + matrix[p, q].ToString() + "    ";
                        }
                        djf += Environment.NewLine;
                    }
                    // Incremental training: reload previous weights, train, save again.
                    Matrix <float> projected = result.Convert <float>();
                    // NOTE(review): 'trainData' is constructed but never used — ann.Train
                    // is called with the raw matrices below; confirm before removing.
                    TrainData    trainData = new TrainData(projected, DataLayoutType.RowSample, response);
                    int          features  = 16;
                    int          classes   = 26;
                    Matrix <int> layers    = new Matrix <int>(6, 1);
                    layers[0, 0] = features;
                    layers[1, 0] = classes * 16;
                    layers[2, 0] = classes * 8;
                    layers[3, 0] = classes * 4;
                    layers[4, 0] = classes * 2;
                    layers[5, 0] = classes;
                    ANN_MLP     ann             = new ANN_MLP();
                    FileStorage fileStorageRead = new FileStorage(@"abc.xml", FileStorage.Mode.Read);
                    ann.Read(fileStorageRead.GetRoot(0));
                    ann.SetLayerSizes(layers);
                    ann.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                    ann.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
                    ann.Train(projected, DataLayoutType.RowSample, response);
                    FileStorage fileStorageWrite = new FileStorage(@"abc.xml", FileStorage.Mode.Write);
                    ann.Write(fileStorageWrite);

                    // Predict using row 11 of the projected features.
                    Matrix <float> probe = new Matrix <float>(1, 16);
                    for (int q = 0; q < 16; q++)
                    {
                        probe[0, q] = projected[11, q];
                    }
                    float real = ann.Predict(probe);

                    recognisedText += array[(int)real];
                    SpeechSynthesizer reader = new SpeechSynthesizer();

                    // NOTE(review): this guards on the *previous* textbox content rather
                    // than on the newly recognised text — confirm the intended condition.
                    if (richTextBox1.Text != " ")
                    {
                        reader.Dispose();
                        reader = new SpeechSynthesizer();
                        reader.SpeakAsync(recognisedText.ToString());
                    }
                    else
                    {
                        MessageBox.Show("No Text Present!");
                    }
                    richTextBox1.Text = recognisedText.ToString();
                    System.IO.File.WriteAllText(@"g.txt", real.ToString());
                }
            }
        }
            static public bool DealWithJson(string clientUrl, string trainDataJsonStr)
            {
                bool          retValue = false;
                TrainDataJson trainDataJSON;
                int           socketIndex = socketUrlList.IndexOf(clientUrl);

                try
                {
                    trainDataJSON = Deserialize(trainDataJsonStr);
                    trainDataJSON.operateReport = false;
                    Request currentRequest = (Request)Enum.Parse(typeof(Request), trainDataJSON.requestMode, true);
                    trainDataJSON.requestMode = Request.RESPONSE.ToString();
                    if (trainDataJSON.deserialized)
                    {
                        if (currentRequest == Request.RESPONSE)
                        {
                            socketState[socketIndex] = StateManager.READY;
                        }
                        if (socketState[socketIndex] == StateManager.READY)
                        {
                            try
                            {
                                switch (currentRequest)
                                {
                                case Request.SET_MODE:
                                {
                                    trainDataJSON.requestMode = Request.RESPONSE.ToString();
                                    ClientType clientType = (ClientType)Enum.Parse(typeof(ClientType), trainDataJSON.requestClientType, true);
                                    SetSocketDataProvider(clientUrl, clientType);
                                    socketReadBuffer[socketIndex]   = string.Empty;
                                    socketState[socketIndex]        = StateManager.READY;
                                    trainDataJSON.requestClientType = string.Empty;
                                    trainDataJSON.dataRequestList.Clear();
                                    trainDataJSON.dataResponseList.Clear();
                                    break;
                                }

                                case Request.REQUEST_DATA_LIST:
                                {
                                    trainDataJSON.requestMode       = Request.SET_DATA.ToString();
                                    trainDataJSON.requestClientType = string.Empty;
                                    trainDataJSON.dataResponseList.Clear();
                                    trainDataJSON.dataRequestList.Clear();
                                    for (int i = 0; i < DataManager.processData.GetTrainDataCount(); i++)
                                    {
                                        TrainData trainData = new TrainData();
                                        trainData.trainData         = string.Empty;
                                        trainData.trainDataID       = i;
                                        trainData.trainDataType     = DataManager.processData.trainDataType[i].ToString();
                                        trainData.trainDataClassify = DataManager.processData.trainDataClassify[i].ToString();
                                        trainDataJSON.dataRequestList.Add(i);
                                        trainDataJSON.dataResponseList.Add(trainData);
                                    }
                                    socketState[socketIndex] = StateManager.WAITTING_SUCC;
                                    break;
                                }

                                case Request.REQUEST_DATA:
                                {
                                    List <int> currentDataRequest = new List <int>();
                                    trainDataJSON.requestMode       = Request.SET_DATA.ToString();
                                    trainDataJSON.requestClientType = string.Empty;
                                    trainDataJSON.dataResponseList.Clear();
                                    currentDataRequest.Clear();
                                    foreach (int dataIndex in trainDataJSON.dataRequestList)
                                    {
                                        TrainData trainData = new TrainData();
                                        trainData.trainDataID       = dataIndex;
                                        trainData.trainDataType     = DataManager.processData.trainDataType[dataIndex].ToString();
                                        trainData.trainDataClassify = DataManager.processData.trainDataClassify[dataIndex].ToString();
                                        trainData.trainData         = TrainData2Str(dataIndex);
                                        currentDataRequest.Add(trainDataJSON.dataResponseList.Count);
                                        trainDataJSON.dataResponseList.Add(trainData);
                                    }
                                    trainDataJSON.dataRequestList.Clear();
                                    trainDataJSON.dataRequestList = currentDataRequest;
                                    socketState[socketIndex]      = StateManager.WAITTING_SUCC;
                                    break;
                                }

                                case Request.REQUEST_ALL_DATA:
                                {
                                    trainDataJSON.requestMode       = Request.SET_DATA.ToString();
                                    trainDataJSON.requestClientType = string.Empty;
                                    trainDataJSON.dataResponseList.Clear();
                                    trainDataJSON.dataRequestList.Clear();
                                    for (int dataIndex = 0; dataIndex < DataManager.processData.GetTrainDataCount(); dataIndex++)
                                    {
                                        TrainData trainData = new TrainData();
                                        trainData.trainDataID       = dataIndex;
                                        trainData.trainDataType     = DataManager.processData.trainDataType[dataIndex].ToString();
                                        trainData.trainDataClassify = DataManager.processData.trainDataClassify[dataIndex].ToString();
                                        trainData.trainData         = TrainData2Str(dataIndex);
                                        trainDataJSON.dataRequestList.Add(dataIndex);
                                        trainDataJSON.dataResponseList.Add(trainData);
                                    }
                                    socketState[socketIndex] = StateManager.WAITTING_SUCC;
                                    break;
                                }

                                case Request.SET_DATA:
                                {
                                    trainDataJSON.requestMode = Request.RESPONSE.ToString();
                                    try
                                    {
                                        foreach (int dataListIndex in trainDataJSON.dataRequestList)
                                        {
                                            int dataIndex = trainDataJSON.dataResponseList[dataListIndex].trainDataID;
                                            ApplyTrainData(dataIndex, trainDataJSON.dataResponseList[dataListIndex].trainData);
                                        }
                                    }
                                    catch (Exception) { };
                                    trainDataJSON.requestClientType = string.Empty;
                                    trainDataJSON.dataResponseList.Clear();
                                    trainDataJSON.dataRequestList.Clear();
                                    socketState[socketIndex] = StateManager.READY;
                                    break;
                                }

                                case Request.PAUSE_SIMULATOR:
                                {
                                    trainDataJSON.requestMode = Request.RESPONSE.ToString();
                                    RProxy.SimCoreClient.PauseSimulator(true);
                                    trainDataJSON.requestClientType = string.Empty;
                                    trainDataJSON.dataResponseList.Clear();
                                    trainDataJSON.dataRequestList.Clear();
                                    socketState[socketIndex] = StateManager.READY;
                                    break;
                                }

                                case Request.CONTINUE_SIMULATOR:
                                {
                                    trainDataJSON.requestMode = Request.RESPONSE.ToString();
                                    RProxy.SimCoreClient.PauseSimulator(false);
                                    trainDataJSON.requestClientType = string.Empty;
                                    trainDataJSON.dataResponseList.Clear();
                                    trainDataJSON.dataRequestList.Clear();
                                    socketState[socketIndex] = StateManager.READY;
                                    break;
                                }
                                }
                                trainDataJSON.operateReport = true;
                            }
                            catch (Exception) { };
                            socketList[socketIndex].Send(Serialize(trainDataJSON));
                            retValue = true;
                        }
                    }
                }
                catch (Exception) { };
                return(retValue);
            }
Example #33
0
    /// <summary>
    /// Finds the pair of adjacent timetable stations the train is between at
    /// the specified time of day.
    /// </summary>
    /// <param name="_SpcifiedHour">Hour component (0-23) of the query time.</param>
    /// <param name="_SpcifiedMinite">Minute component (0-59) of the query time.</param>
    /// <param name="_TrainData">Train whose timetable is scanned.</param>
    /// <param name="_StationDataVec">Known stations keyed by station ID; timetable
    /// entries whose station is not in this map are skipped.</param>
    /// <param name="_StationID_i">Receives the ID of the earlier station of the pair.</param>
    /// <param name="_StationID_j">Receives the ID of the later station of the pair.
    /// Equals _StationID_i when the train is before the first or past the last stop.</param>
    /// <returns>true when a bracketing station pair was found, false otherwise
    /// (e.g. fewer than two usable timetable entries).</returns>
    public bool FindTrainBetween( int _SpcifiedHour ,
								  int _SpcifiedMinite , 
								  TrainData _TrainData , 
								  Dictionary<int , StationData> _StationDataVec ,
								  ref int _StationID_i ,
								  ref int _StationID_j )
    {
        // Use the DebugLog() wrapper like the rest of this class instead of
        // logging unconditionally through Debug.Log().
        DebugLog( "FindTrainBetween() " + _SpcifiedHour + ":" + _SpcifiedMinite ) ;

        // All time comparisons are done in minutes-since-midnight.
        int totalMinSpcified = _SpcifiedHour * 60 + _SpcifiedMinite ;

        for( int i = 0 ; i < _TrainData.m_TimeTable.Count - 1 ; ++i )
        {
            int j = i + 1 ;

            int id_i = GetStationIDByStationName( _TrainData.m_TimeTable[ i ].Station ) ;
            int id_j = GetStationIDByStationName( _TrainData.m_TimeTable[ j ].Station ) ;

            // Skip timetable entries that refer to stations we do not know about.
            if( false == _StationDataVec.ContainsKey( id_i ) ||
                false == _StationDataVec.ContainsKey( id_j ) )
                continue ;

            int totalMin_i = _TrainData.m_TimeTable[ i ].Hour * 60 + _TrainData.m_TimeTable[ i ].Minite ;
            int totalMin_j = _TrainData.m_TimeTable[ j ].Hour * 60 + _TrainData.m_TimeTable[ j ].Minite ;

            DebugLog( "FindTrainBetween() totalMinSpcified" +
                totalMinSpcified + " totalMin_i" + totalMin_i + " totalMin_j" + totalMin_j ) ;

            if( totalMinSpcified >= totalMin_i &&
                totalMinSpcified < totalMin_j )
            {
                // Between station i and station j.
                _StationID_i = id_i ;
                _StationID_j = id_j ;
                return true ;
            }
            else if( j == _TrainData.m_TimeTable.Count - 1 &&
                     totalMinSpcified >= totalMin_j )
            {
                // At or past the last stop: report the terminal station twice.
                _StationID_i = id_j ;
                _StationID_j = id_j ;
                return true ;
            }
            else if( i == 0 &&
                     totalMinSpcified < totalMin_i )
            {
                // Before the first stop: report the first station twice.
                _StationID_i = id_i ;
                _StationID_j = id_i ;
                return true ;
            }
        }
        return false ;
    }
 /// <summary>
 /// Initializes the view model with an empty query, no query results, and an
 /// empty observable collection for the result view to bind to.
 /// </summary>
 public MainWindowViewModel()
 {
     QueryRequest = new QueryRequest();
     // Array.Empty avoids allocating a fresh zero-length array (CA1825);
     // fully qualified so no additional using directive is required.
     QueryResult  = System.Array.Empty<TrainData>();
     ShowResult   = new ObservableCollection <TrainData>();
 }
Example #35
0
    /// <summary>
    /// Computes the train's world position at the specified time of day by
    /// linearly interpolating between the two timetable stations that bracket
    /// that time.
    /// </summary>
    /// <param name="_SpcifiedHour">Hour component (0-23) of the query time.</param>
    /// <param name="_SpcifiedMinite">Minute component (0-59) of the query time.</param>
    /// <param name="_TrainData">Train whose timetable is scanned.</param>
    /// <param name="_StationDataVec">Known stations keyed by station ID; timetable
    /// entries whose station is missing from this map are skipped.</param>
    /// <param name="_TargetID">Receives the station ID the train is heading to
    /// (or sitting at, for the first/last stop).</param>
    /// <returns>The interpolated position with z forced onto the train render
    /// layer; a zero-based position if no bracketing pair was found.</returns>
    private Vector3 FindTrainPosition( int _SpcifiedHour ,
									   int _SpcifiedMinite , 
									   TrainData _TrainData , 
									   Dictionary<int , StationData> _StationDataVec ,
	                                   ref int _TargetID )
    {
        DebugLog( _SpcifiedHour + ":" + _SpcifiedMinite ) ;
        Vector3 ret = Vector3.zero ;

        // All time comparisons are done in minutes-since-midnight.
        int totalMinSpcified = _SpcifiedHour * 60 + _SpcifiedMinite ;
        for( int i = 0 ; i < _TrainData.m_TimeTable.Count - 1 ; ++i )
        {
            int j = i + 1 ;

            int id_i = pLevelGenerator.GetStationIDByStationName( _TrainData.m_TimeTable[ i ].Station ) ;
            int id_j = pLevelGenerator.GetStationIDByStationName( _TrainData.m_TimeTable[ j ].Station ) ;
            DebugLog( "_TrainData.m_TimeTable[ i ].Station" + _TrainData.m_TimeTable[ i ].Station ) ;
            DebugLog( "_TrainData.m_TimeTable[ j ].Station" + _TrainData.m_TimeTable[ j ].Station ) ;

            // Skip timetable entries that refer to stations we do not know about.
            if( false == _StationDataVec.ContainsKey( id_i ) ||
                false == _StationDataVec.ContainsKey( id_j ) )
            {
                DebugLog( "false == _StationDataVec" + id_i + ","+ id_j ) ;
                continue ;
            }

            Vector3 pos_i = _StationDataVec[ id_i ].Position ;
            Vector3 pos_j = _StationDataVec[ id_j ].Position ;

            int totalMin_i = _TrainData.m_TimeTable[ i ].Hour * 60 + _TrainData.m_TimeTable[ i ].Minite ;
            int totalMin_j = _TrainData.m_TimeTable[ j ].Hour * 60 + _TrainData.m_TimeTable[ j ].Minite ;

            DebugLog( "totalMinSpcified=" + totalMinSpcified + " totalMin_i=" + totalMin_i + " totalMin_j" + totalMin_j ) ;

            if( totalMinSpcified >= totalMin_i &&
                totalMinSpcified < totalMin_j )
            {
                // Between station i and station j: interpolate. Computed only
                // inside this branch so a zero time span (totalMin_i ==
                // totalMin_j) can never feed the division — this branch
                // requires totalMin_i < totalMin_j.
                float interpolateValue =   (float) ( totalMinSpcified - totalMin_i ) /
                                         ( (float) totalMin_j - (float) totalMin_i ) ;
                _TargetID = id_j ;
                ret = Vector3.Lerp( pos_i ,
                              pos_j ,
                              interpolateValue ) ;
                DebugLog( "pos_i" + pos_i ) ;
                DebugLog( "pos_j" + pos_j ) ;
                DebugLog( "ret" + ret ) ;
                break ;
            }
            else if( j == _TrainData.m_TimeTable.Count - 1 &&
                     totalMinSpcified >= totalMin_j )
            {
                // At or past the last stop: snap to the terminal station.
                _TargetID = id_j ;
                DebugLog( "pos_j" + pos_j ) ;
                ret = Vector3.Lerp( pos_i ,
                              pos_j ,
                             1 ) ;
                break ;
            }
            else if( i == 0 &&
                     totalMinSpcified < totalMin_i )
            {
                // Before the first stop: snap to the first station.
                _TargetID = id_i ;
                DebugLog( "pos_i" + pos_i ) ;
                ret = Vector3.Lerp( pos_i ,
                              pos_j ,
                             0 ) ;
                break ;
            }
        }

        // Keep trains on their dedicated render layer regardless of station z.
        ret.z = pLevelGenerator.m_TrainLayerZShift ;
        DebugLog( "ret" + ret ) ;
        return ret ;
    }
Example #36
0
        /// <summary>
        /// Trains a random-forest classifier on the letter-recognition data set
        /// (80% train / 20% test split) and prints the exact-prediction
        /// accuracy for both partitions.
        /// </summary>
        public void TestRTreesLetterRecognition()
        {
            Matrix <float> data, response;

            ReadLetterRecognitionData(out data, out response);

            // The first 80% of the rows are used for training, the rest for testing.
            int trainingSampleCount = (int)(data.Rows * 0.8);

            // One var-type flag per feature column, plus one for the response column.
            Matrix <Byte> varType = new Matrix <byte>(data.Cols + 1, 1);

            varType.SetValue((byte)MlEnum.VarType.Numerical);         //the data is numerical
            varType[data.Cols, 0] = (byte)MlEnum.VarType.Categorical; //the response is categorical

            // Non-zero rows in sampleIdx mark the samples used for training.
            Matrix <byte> sampleIdx = new Matrix <byte>(data.Rows, 1);

            using (Matrix <byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
                sampleRows.SetValue(255);

            using (RTrees forest = new RTrees())
                using (
                    TrainData td = new TrainData(data, MlEnum.DataLayoutType.RowSample, response, null, sampleIdx, null,
                                                 varType))
                {
                    forest.MaxDepth               = 10;
                    forest.MinSampleCount         = 10;
                    forest.RegressionAccuracy     = 0.0f;
                    forest.UseSurrogates          = false;
                    forest.MaxCategories          = 15;
                    forest.CalculateVarImportance = true;
                    forest.ActiveVarCount         = 4;
                    forest.TermCriteria           = new MCvTermCriteria(100, 0.01f);
                    bool success = forest.Train(td);

                    if (!success)
                    {
                        // NOTE(review): a failed Train() silently ends the test as
                        // "passed"; consider asserting failure here instead.
                        return;
                    }

                    // Count exact predictions separately for the train and test rows.
                    double trainDataCorrectRatio = 0;
                    double testDataCorrectRatio  = 0;
                    for (int i = 0; i < data.Rows; i++)
                    {
                        using (Matrix <float> sample = data.GetRow(i))
                        {
                            double r = forest.Predict(sample, null);
                            r = Math.Abs(r - response[i, 0]);
                            if (r < 1.0e-5)
                            {
                                if (i < trainingSampleCount)
                                {
                                    trainDataCorrectRatio++;
                                }
                                else
                                {
                                    testDataCorrectRatio++;
                                }
                            }
                        }
                    }

                    trainDataCorrectRatio /= trainingSampleCount;
                    testDataCorrectRatio  /= (data.Rows - trainingSampleCount);

                    StringBuilder builder = new StringBuilder("Variable Importance: ");

                    EmguAssert.WriteLine(String.Format("Prediction accuracy for training data :{0}%",
                                                       trainDataCorrectRatio * 100));
                    EmguAssert.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
                    EmguAssert.WriteLine(builder.ToString());
                }
        }
Example #37
0
        /// <summary>
        /// Trains a 2-5-1 ANN_MLP to separate two normally-distributed point
        /// clusters, paints a 500x500 image with the network's response for
        /// every pixel, and overlays the original training samples.
        /// </summary>
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample     = new Matrix <float>(1, 2);
            Matrix <float> prediction = new Matrix <float>(1, 1);

            // First half of the samples ~ N(200, 50) labelled class 1;
            // second half ~ N(300, 50) labelled class 2.
            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);

            #endregion

            using (Matrix <int> layerSize = new Matrix <int>(new int[] { 2, 5, 1 }))
                using (Mat layerSizeMat = layerSize.Mat)

                    using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
                        using (ANN_MLP network = new ANN_MLP())
                        {
                            network.SetLayerSizes(layerSizeMat);
                            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
                            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
                            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
                            network.Train(td, (int)Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);

#if !NETFX_CORE
                            // Round-trip the trained model through a temp file to exercise Save().
                            String fileName = Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
                            network.Save(fileName);
                            if (File.Exists(fileName))
                            {
                                File.Delete(fileName);
                            }
#endif

                            for (int i = 0; i < img.Height; i++)
                            {
                                for (int j = 0; j < img.Width; j++)
                                {
                                    sample.Data[0, 0] = j;
                                    sample.Data[0, 1] = i;
                                    network.Predict(sample, prediction);

                                    // estimates the response and get the neighbors' labels
                                    float response = prediction.Data[0, 0];

                                    // highlight the pixel depending on the accuracy (or confidence)
                                    img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
                                }
                            }
                        }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
                // Match p1: keep float coordinates instead of truncating to int,
                // so both clusters are plotted with the same precision.
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #38
0
    /// <summary>
    /// Loads the train timetable TextAsset at _Filepath, parses one train per
    /// non-empty line, and fills m_TrainData keyed by train ID. Any previously
    /// loaded train data is destroyed first.
    /// </summary>
    /// <param name="_Filepath">Resources path of the timetable TextAsset.</param>
    private void LoadTrainTable( string _Filepath )
    {
        DebugLog( "LoadTrainTable() start." ) ;

        DestroyTrainData() ;

        TextAsset ta = (TextAsset) Resources.Load( _Filepath , typeof(TextAsset) ) ;
        if( null == ta )
        {
            Debug.LogError( "LoadTrainTable() _Filepath load failed=" + _Filepath ) ;
            return ;
        }

        // m_TrainData
        string content = ta.text ;
        DebugLog( "content=" + content ) ;
        // Accept Windows, Unix and old-Mac line endings.
        string[] splitor1 = { "\r\n" , "\n" , "\r" }  ;
        // RemoveEmptyEntries makes the former per-line length check redundant:
        // it drops exactly the zero-length lines the old check skipped.
        string []lineVec = content.Split( splitor1 , System.StringSplitOptions.RemoveEmptyEntries ) ;
        DebugLog( "lineVec.Length=" + lineVec.Length ) ;
        for( int i = 0 ; i < lineVec.Length ; ++i )
        {
    #if DEBUG
            Debug.Log( lineVec[ i ] ) ;
    #endif
            TrainData trainData = new TrainData() ;

            trainData.ParseFromString( lineVec[ i ] ) ;
            m_TrainData.Add( trainData.ID , trainData ) ;
            DebugLog( "m_TrainData.Add=" + trainData.DisplayName ) ;
        }
        DebugLog( "LoadTrainTable() end." ) ;
    }