Example #1
    // Per-class non-maximum suppression: for each class, suppress lower-scoring
    // boxes that overlap a higher-scoring box by more than nms_thresh.
    private void RemoveDuplicates(List <ResultBox> boxes, float nms_thresh)
    {
        if (boxes.Count == 0)
        {
            return;
        }

        for (int c = 0; c < classesNum; c++)
        {
            // Collect this class's score for every box.
            float[] classValues = new float[boxes.Count];
            classValues.Update((x, i) => boxes[i].classes[c]);

            // Box indexes ordered by class score.
            int[] sortedIndexes = _sortIdx(classValues);

            for (int i = 0; i < boxes.Count; i++)
            {
                int i_index = sortedIndexes[i];
                if (boxes[i_index].classes[c] == 0)
                {
                    continue;
                }

                // Zero out the class score of every remaining box that overlaps
                // the current box too strongly.
                for (int j = i + 1; j < boxes.Count; j++)
                {
                    int j_index = sortedIndexes[j];
                    if (NNUtils.BoxesIOU(boxes[i_index].rect, boxes[j_index].rect) >= nms_thresh)
                    {
                        boxes[j_index].classes[c] = 0;
                    }
                }
            }
        }
    }
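
The suppression above relies on NNUtils.BoxesIOU returning an intersection-over-union score for two rectangles. Its implementation is not shown on this page; a minimal sketch of what such a helper typically looks like, assuming UnityEngine.Rect-style axis-aligned boxes, is:

    using UnityEngine;

    public static class IOUSketch
    {
        // Hypothetical helper, not the actual NNUtils implementation.
        public static float BoxesIOU(Rect a, Rect b)
        {
            // Overlap extents on each axis (zero when the boxes do not intersect).
            float interW = Mathf.Max(0f, Mathf.Min(a.xMax, b.xMax) - Mathf.Max(a.xMin, b.xMin));
            float interH = Mathf.Max(0f, Mathf.Min(a.yMax, b.yMax) - Mathf.Max(a.yMin, b.yMin));
            float inter  = interW * interH;
            float union  = a.width * a.height + b.width * b.height - inter;
            return union > 0f ? inter / union : 0f;
        }
    }
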
Example #2
    // Splits the raw byte data for one category into training and testing sets.
    // useTrainingData is the fraction of samples that goes into the training set.
    private void PrepareData(Category category, byte[] data, eDoodleCategory label)
    {
        int trainingdata_num = (int)(useTrainingData * total_data);

        category.training = new DoodleData[trainingdata_num];
        category.testing  = new DoodleData[total_data - trainingdata_num];

        for (int i = 0; i < total_data; i++)
        {
            // Each sample occupies a contiguous slice of `len` bytes.
            int offset = i * len;
            int start  = offset;
            int end    = (offset + len) - 1;
            if (i < trainingdata_num)
            {
                // training data
                category.training[i]       = new DoodleData();
                category.training[i].val   = NNUtils.SubArray(data, start, end);
                category.training[i].label = label;
            }
            else
            {
                // testing data
                category.testing[i - trainingdata_num]       = new DoodleData();
                category.testing[i - trainingdata_num].val   = NNUtils.SubArray(data, start, end);
                category.testing[i - trainingdata_num].label = label;
            }
        }
    }
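
The slicing above assumes NNUtils.SubArray(data, start, end) returns the elements from start to end inclusive (note that end is computed as offset + len - 1). A minimal generic sketch consistent with that usage, which is an assumption rather than the library's actual code, could be:

    using System;

    public static class SubArraySketch
    {
        // Hypothetical helper matching the inclusive-end usage in PrepareData above;
        // the real NNUtils.SubArray may differ.
        public static T[] SubArray<T>(T[] source, int start, int end)
        {
            T[] slice = new T[end - start + 1];
            Array.Copy(source, start, slice, 0, slice.Length);
            return slice;
        }
    }
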
Example #3
        static void TestNN(MultiLayerNetwork nw, List <GradeDigest> testDigests, int run)
        {
            // For every test digest: (was the answer correct, confidence value).
            List <Tuple <bool, double> > results = new List <Tuple <bool, double> >();

            foreach (var gd in testDigests)
            {
                double[] output = new double[4];
                nw.NetOUT(NNUtils.ToNetworkInput(GradeDigest.UnpackBits(gd.data)), out output);

                int    ans       = inputCodes[NNUtils.Answer(output)];
                double certainty = NNUtils.AnswerConfidence(output);
                results.Add(new Tuple <bool, double>(ans == gd.grade, certainty));
            }

            // 95th percentile of the confidence values...
            double confidenceThreshold = results.Select(t => t.Item2).OrderBy(x => x).ElementAt((int)Math.Floor(results.Count * 0.95));

            // ...overridden here with a fixed threshold.
            confidenceThreshold = 0.0001;

            int testSuccess = 0;
            int testFailure = 0;

            int sureTestSuccess = 0;
            int sureTestFailure = 0;
            int unsure          = 0;

            foreach (var res in results)
            {
                if (res.Item1)
                {
                    testSuccess++;
                }
                else
                {
                    testFailure++;
                }

                // A value above the threshold means the network was not decisive
                // enough (smaller values indicate higher confidence).
                if (res.Item2 > confidenceThreshold)
                {
                    unsure++;
                }
                else if (res.Item1)
                {
                    sureTestSuccess++;
                }
                else
                {
                    sureTestFailure++;
                }
            }

            Func <int, double> perc = x => ((double)x / testDigests.Count * 100);

            Console.WriteLine("Test results (r/w%): {0:F2}/{1:F2}", perc(testSuccess), perc(testFailure));
            Console.WriteLine("Test results (r/u/w%): {0:F2}/{1:F2}/{2:F2} (confidence threshold = {3})",
                              perc(sureTestSuccess), perc(unsure), perc(sureTestFailure), confidenceThreshold);

            nw.SaveNW(String.Format("e:/Pronko/prj/Grader/ocr-data/grade-recognition_{0}_{1:F2}_{2:F2}.nn",
                                    run, perc(testSuccess), perc(sureTestFailure)));
        }
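
NNUtils.Answer and NNUtils.AnswerConfidence are not shown among these examples. From the usage here (and in Example #5, where a value below the threshold counts as confident), Answer looks like an argmax over the output vector, and smaller AnswerConfidence values appear to mean a more decisive answer. A purely illustrative sketch under those assumptions:

    using System.Linq;

    public static class AnswerSketch
    {
        // Hypothetical: index of the strongest output neuron.
        public static int Answer(double[] output)
        {
            int best = 0;
            for (int i = 1; i < output.Length; i++)
            {
                if (output[i] > output[best]) { best = i; }
            }
            return best;
        }

        // Hypothetical confidence measure where smaller is better: the ratio of the
        // runner-up output to the winning output (near 0 when the winner dominates).
        public static double AnswerConfidence(double[] output)
        {
            var sorted = output.OrderByDescending(x => x).ToArray();
            return sorted[0] > 0 ? sorted[1] / sorted[0] : 1.0;
        }
    }
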
Example #4
    public void SaveModel()
    {
        // Serialize the trained network to JSON and write it to disk.
        string jsonstr = nn.ToJSON();

        NNUtils.SaveText(trainedModelFile, jsonstr);
        Debug.Log(jsonstr);
    }
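
NNUtils.SaveText, together with NNUtils.LoadText used in Example #8 below, behaves like a thin wrapper around text file I/O. A plausible sketch, assuming it simply delegates to System.IO.File, is:

    using System.IO;

    public static class TextIOSketch
    {
        // Hypothetical wrappers; the real NNUtils helpers may add error handling
        // or platform-specific storage (e.g. Unity's persistentDataPath).
        public static void SaveText(string path, string text) => File.WriteAllText(path, text);
        public static string LoadText(string path)            => File.ReadAllText(path);
    }
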
Example #5
        public static RecognitionResult RecognizeGrade(GradeDigest digest)
        {
            // Output neuron index -> grade value.
            List <int> gradeCodes = new List <int> {
                2, 3, 4, 5
            };

            double[] output = new double[4];
            gradeRecognitionNetwork.NetOUT(NNUtils.ToNetworkInput(GradeDigest.UnpackBits(digest.data)), out output);
            // A confidence value below the threshold counts as a confident answer.
            return(new RecognitionResult(
                       grade: gradeCodes[NNUtils.Answer(output)],
                       confident: NNUtils.AnswerConfidence(output) < recognitionConfidenceThreshold
                       ));
        }
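
The RecognitionResult type itself is not listed on this page. Judging only from the named arguments, it is a small value object along these lines (an assumption, not the project's actual definition):

    public class RecognitionResult
    {
        public int  grade;      // recognized grade (2-5)
        public bool confident;  // true when the confidence passed the threshold

        public RecognitionResult(int grade, bool confident)
        {
            this.grade     = grade;
            this.confident = confident;
        }
    }
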
Example #6
        static void Main(string[] args)
        {
            // Network layout: GradeDigest.dataSize inputs, one hidden layer of
            // 100 neurons, 4 outputs (one per grade).
            MultiLayerNetwork nw = new MultiLayerNetwork(GradeDigest.dataSize, new int[] { 100, 4 });

            List <GradeDigest> trainDigests = GradeDigestSet.staticInstance.GetDigestList();
            List <GradeDigest> testDigests  = GradeDigestSet.Read("e:/Pronko/prj/Grader/ocr-data/test-data/grade-digests.db").GetDigestList();

            for (int trainingRun = 1; trainingRun <= 100; trainingRun++)
            {
                Util.Timed(String.Format("training run #{0}", trainingRun), () => {
                    int c = 0;
                    foreach (var gd in trainDigests.Shuffle())
                    {
                        // One-hot target vector for the digest's grade,
                        // trained with a learning rate of 0.1.
                        double[] desiredOutput = new double[4];
                        desiredOutput[inputCodes.IndexOf(gd.grade)] = 1;
                        nw.LearnNW(NNUtils.ToNetworkInput(GradeDigest.UnpackBits(gd.data)), desiredOutput, 0.1);
                        c++;
                    }
                });

                TestNN(nw, testDigests, trainingRun);
            }
        }
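
Util.Timed wraps an action with timing output; its implementation is not shown here. A minimal sketch of such a helper using System.Diagnostics.Stopwatch (again an assumption about the project's code) looks like:

    using System;
    using System.Diagnostics;

    public static class UtilSketch
    {
        // Hypothetical timing helper: runs the action and reports how long it took.
        public static void Timed(string label, Action action)
        {
            var sw = Stopwatch.StartNew();
            action();
            sw.Stop();
            Console.WriteLine("{0}: {1} ms", label, sw.ElapsedMilliseconds);
        }
    }
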
Example #7
    public void TrainEpoch()
    {
        classifierState = eClassifierState.TRAIN;
        // Reshuffle the training samples every epoch.
        training        = NNUtils.Shuffle(training);
        for (int i = 0; i < training.Length; i++)
        {
            DoodleData   data   = training[i];
            // Normalize the raw byte pixels to the [0, 1] range.
            List <float> inputs = new List <float>();
            for (int j = 0; j < data.val.Length; j++)
            {
                inputs.Add(data.val[j] / 255.0f);
            }

            // One-hot target vector over the three doodle categories.
            eDoodleCategory label   = data.label;
            List <float>    targets = new List <float>()
            {
                0f, 0f, 0f
            };
            targets[(int)label] = 1;

            nn.train(inputs, targets);
        }
        classifierState = eClassifierState.NONE;
    }
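
NNUtils.Shuffle reorders the training set before each epoch; a common way to implement such a helper is an in-place Fisher-Yates shuffle. The sketch below is an assumption about its behavior, not the library's actual code:

    using System;

    public static class ShuffleSketch
    {
        private static readonly Random rng = new Random();

        // Hypothetical Fisher-Yates shuffle returning the same array instance,
        // matching the `training = NNUtils.Shuffle(training)` usage above.
        public static T[] Shuffle<T>(T[] items)
        {
            for (int i = items.Length - 1; i > 0; i--)
            {
                int j = rng.Next(i + 1);
                T tmp    = items[i];
                items[i] = items[j];
                items[j] = tmp;
            }
            return items;
        }
    }
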
Example #8
    public void LoadModel()
    {
        // Restore the network from the JSON file written by SaveModel() in Example #4.
        string jsonTxt = NNUtils.LoadText(trainedModelFile);

        nn = NeuralNetwork.CreateFromJSON(jsonTxt);
    }