Esempio n. 1
0
        /// <summary>
        /// Runs the first <paramref name="numberOfLayersToCompute"/> layers of the nested
        /// algorithm over <paramref name="inputImage"/>, feeding each layer's (resized)
        /// previous output into the next layer via the Layer color channel.
        /// </summary>
        /// <param name="inputImage">Full-resolution source image.</param>
        /// <param name="numberOfLayersToCompute">Number of layers, starting at layer 0, to evaluate.</param>
        /// <returns>One computed image per evaluated layer, in layer order.</returns>
        public List <ImageDescription> computeImageForLayers(ImageDescription inputImage, int numberOfLayersToCompute)
        {
            List <ImageDescription> computedImages = new List <ImageDescription>(numberOfLayersToCompute);

            for (int i = 0; i < numberOfLayersToCompute; i++)
            {
                EdgeDetectionAlgorithm algorithm = layers[i].algorithm;
                int layerResizeFactor            = layers[i].resizeFactor;

                int targetSizeX = inputImage.sizeX / layerResizeFactor;
                int targetSizeY = inputImage.sizeY / layerResizeFactor;

                // Build only the filters that are actually needed: the original
                // allocated both resize filters on every iteration even when unused.
                ImageDescription newInputImage;
                if (layerResizeFactor == 1)
                {
                    newInputImage = inputImage;
                }
                else
                {
                    ResizeFilter resizeColor = new ResizeFilter(targetSizeX, targetSizeY, ImageDescriptionUtil.colorChannels);
                    newInputImage = resizeColor.filter(inputImage);
                }

                // Every layer after the first also sees the previous layer's output,
                // resized to this layer's resolution, as an extra grayscale channel.
                if (i > 0)
                {
                    ResizeFilter     resizeGrayscale = new ResizeFilter(targetSizeX, targetSizeY, ImageDescriptionUtil.grayscaleChannel);
                    ImageDescription resizedComputed = resizeGrayscale.filter(computedImages[i - 1]);
                    newInputImage.setColorChannel(ColorChannelEnum.Layer, resizedComputed.gray);
                }

                computedImages.Add(algorithm.test(newInputImage));
            }

            return(computedImages);
        }
 /// <summary>
 /// Serializes <paramref name="algorithm"/> into a GZip-compressed file, overwriting
 /// any existing file at <paramref name="filename"/>.
 /// </summary>
 /// <param name="algorithm">Algorithm to persist (via its own save method).</param>
 /// <param name="filename">Destination path for the compressed model.</param>
 public static void saveToCompressedFile(EdgeDetectionAlgorithm algorithm, string filename)
 {
     // Nested usings: the FileStream is disposed even if the GZipStream
     // constructor throws. (leaveOpen: false only covers the stream after the
     // GZipStream has been constructed successfully.)
     using (FileStream fileStream = new FileStream(filename, FileMode.Create))
     using (GZipStream zippedStream = new GZipStream(fileStream, CompressionMode.Compress, false))
     {
         algorithm.save(zippedStream);
     }
 }
        /// <summary>
        /// Loads an <see cref="EdgeDetectionAlgorithm"/> from a GZip-compressed file
        /// previously written by saveToCompressedFile.
        /// </summary>
        /// <param name="inputModelFilename">Path of the compressed model file.</param>
        /// <returns>The deserialized algorithm instance.</returns>
        public static EdgeDetectionAlgorithm loadAlgorithmFromCompressedFile(string inputModelFilename)
        {
            // SECURITY: BinaryFormatter deserialization is unsafe on untrusted input
            // (arbitrary code execution) and is removed in .NET 9. Only load model
            // files from a trusted source; migrating the model format is advisable.
            BinaryFormatter bf = new BinaryFormatter();

            // Nested usings so the FileStream is disposed even if the GZipStream
            // constructor throws.
            using (FileStream fileStream = new FileStream(inputModelFilename, FileMode.Open))
            using (GZipStream zippedStream = new GZipStream(fileStream, CompressionMode.Decompress, false))
            {
                return (EdgeDetectionAlgorithm)bf.Deserialize(zippedStream);
            }
        }
Esempio n. 4
0
        /// <summary>
        /// Trains <paramref name="algorithm"/> on downscaled training images, feeding the
        /// output of the already-trained <paramref name="baseAlgorithm"/> (resized to the
        /// same scale) in as an extra Layer input channel.
        /// </summary>
        /// <param name="algorithm">Algorithm being trained in place.</param>
        /// <param name="baseAlgorithm">Previously trained algorithm whose output seeds the Layer channel.</param>
        /// <param name="resizeFactor">Downscale factor applied to both input and ground truth.</param>
        public void trainWithBaseAlgorithm(EdgeDetectionAlgorithm algorithm, EdgeDetectionAlgorithm baseAlgorithm, int resizeFactor)
        {
            DateTime      trainingStart = DateTime.Now;
            float         totalLoss     = 0;
            List <String> fileList      = new List <string>(benchmark.getTrainingFilesPathList());

            int totalNumberOfFiles = numberOfTrainingSetPasses * fileList.Count;
            int totalIndex         = 0;

            for (int pass = 0; pass < numberOfTrainingSetPasses; pass++)
            {
                // Reshuffle each pass so the algorithm never sees a fixed file order.
                ListUtils.Shuffle(fileList);
                int      index             = 1;
                float    totalPassLoss     = 0;
                DateTime trainingPassStart = DateTime.Now;
                foreach (string trainingFileName in fileList)
                {
                    DateTime start = DateTime.Now;

                    // BUGFIX: the original string literal here was corrupted ("Pass: "******"/");
                    // reconstructed as a 1-based pass counter to match the 1-based file index.
                    Console.WriteLine("Pass: " + (pass + 1) + "/" + numberOfTrainingSetPasses + ", " + index + "/" + fileList.Count + " Training file: " + Path.GetFileName(trainingFileName));
                    ImageDescription inputImage    = ImageFileHandler.loadFromPath(trainingFileName);
                    ImageDescription computedImage = baseAlgorithm.test(inputImage);

                    ResizeFilter     resizeColor   = new ResizeFilter(inputImage.sizeX / resizeFactor, inputImage.sizeY / resizeFactor, ImageDescriptionUtil.colorChannels);
                    ImageDescription newInputImage = resizeColor.filter(inputImage);

                    ImageDescription inputImageGroundTruth = ImageFileHandler.loadFromPath(benchmark.getTrainingFileGroundTruth(trainingFileName));
                    inputImageGroundTruth.computeGrayscale();
                    ResizeFilter     resizeGrayscale          = new ResizeFilter(inputImage.sizeX / resizeFactor, inputImage.sizeY / resizeFactor, ImageDescriptionUtil.grayscaleChannel);
                    ImageDescription newInputImageGroundTruth = resizeGrayscale.filter(inputImageGroundTruth);

                    // The base algorithm's output, downscaled, becomes the Layer channel.
                    ImageDescription resizedComputed = resizeGrayscale.filter(computedImage);
                    newInputImage.setColorChannel(ColorChannelEnum.Layer, resizedComputed.gray);

                    float loss = algorithm.train(newInputImage, newInputImageGroundTruth);
                    totalLoss     += loss;
                    totalPassLoss += loss;
                    index++;
                    totalIndex++;

                    double timeElapsed      = (DateTime.Now - start).TotalSeconds;
                    double timeElapsedSoFar = (DateTime.Now - trainingStart).TotalSeconds;
                    double estimatedTime    = (timeElapsedSoFar / totalIndex) * (totalNumberOfFiles - totalIndex);
                    Console.WriteLine("Loss: " + loss.ToString("0.00") + " Time: " + timeElapsed.ToString("0.00") + "s Time elapsed: "
                                      + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
                }
                double trainingPassTimeElapsed = (DateTime.Now - trainingPassStart).TotalSeconds;
                Console.WriteLine("Pass took " + trainingPassTimeElapsed.ToString("0.00") + " sec. Pass loss: " + totalPassLoss.ToString("0.00")
                                  + " Avg loss: " + (totalPassLoss / (fileList.Count)).ToString("0.00"));
            }
            double totalTimeElapsed = (DateTime.Now - trainingStart).TotalSeconds;

            Console.WriteLine("Training took " + totalTimeElapsed.ToString("0.00") + " sec. Total loss: " + totalLoss.ToString("0.00")
                              + " Avg loss: " + (totalLoss / (totalNumberOfFiles)).ToString("0.00"));
        }
        /// <summary>
        /// Loads a serialized model from disk, runs it on a single image file and
        /// writes the resulting image as "test.png" in the working directory.
        /// </summary>
        /// <param name="algorithmToTest">Path of the compressed model file.</param>
        /// <param name="filename">Path of the image to run the model on.</param>
        private static void testAlgorithmOnFile(string algorithmToTest, string filename)
        {
            Console.WriteLine("Started loading " + algorithmToTest);
            EdgeDetectionAlgorithm loadedAlgorithm = EdgeDetectionAlgorithmUtil.loadAlgorithmFromCompressedFile(algorithmToTest);
            Console.WriteLine("Loaded algorithm. Testing.");

            ImageDescription source = ImageFileHandler.loadFromPath(filename);
            ImageDescription result = loadedAlgorithm.test(source);
            ImageFileHandler.saveToPath(result, "test", ".png");

            Console.WriteLine("Saved");
        }
        /// <summary>
        /// Loads a saved model and runs the full benchmark test pass followed by
        /// validation on it.
        /// </summary>
        /// <param name="inputModelFilename">Path of the compressed model file.</param>
        /// <param name="relativeOutputPath">Output folder, relative to the local benchmark path.</param>
        public static void loadAndTestModel(string inputModelFilename, string relativeOutputPath)
        {
            EdgeDetectionAlgorithm loadedAlgorithm = EdgeDetectionAlgorithmUtil.loadAlgorithmFromCompressedFile(inputModelFilename);

            string outputPath = Path.Combine(localBenchmarkPath, relativeOutputPath);
            BerkeleyEdgeDetectionBenchmark benchmark = new BerkeleyEdgeDetectionBenchmark(localBenchmarkPath, outputPath);
            // 0 training passes: this processor is used for testing/validation only.
            EdgeDetectionProcessor processor = new EdgeDetectionProcessor(benchmark, 0, true);

            Console.WriteLine("Testing started.");
            processor.test(loadedAlgorithm);
            Console.WriteLine("Testing finished.");

            Console.WriteLine("Validation started.");
            processor.validate();
            Console.WriteLine("Validation finished.");
        }
Esempio n. 7
0
        /// <summary>
        /// Trains <paramref name="algorithm"/> over the benchmark training set for
        /// numberOfTrainingSetPasses shuffled passes, logging per-file loss and an ETA.
        /// </summary>
        /// <param name="algorithm">Algorithm trained in place; its train method returns the per-image loss.</param>
        public void train(EdgeDetectionAlgorithm algorithm)
        {
            // Threshold 1: any non-black ground-truth pixel counts as an edge.
            BlackAndWhiteConverter blackAndWhiteConverter = new BlackAndWhiteConverter(1);

            DateTime      trainingStart = DateTime.Now;
            float         totalLoss     = 0;
            List <String> fileList      = new List <string>(benchmark.getTrainingFilesPathList());

            int totalNumberOfFiles = numberOfTrainingSetPasses * fileList.Count;
            int totalIndex         = 0;

            for (int pass = 0; pass < numberOfTrainingSetPasses; pass++)
            {
                // Reshuffle each pass so the algorithm never sees a fixed file order.
                ListUtils.Shuffle(fileList);
                int      index             = 1;
                float    totalPassLoss     = 0;
                DateTime trainingPassStart = DateTime.Now;
                foreach (string trainingFileName in fileList)
                {
                    DateTime start = DateTime.Now;

                    // BUGFIX: the original string literal here was corrupted ("Pass: "******"/");
                    // reconstructed as a 1-based pass counter to match the 1-based file index.
                    Console.WriteLine("Pass: " + (pass + 1) + "/" + numberOfTrainingSetPasses + ", " + index + "/" + fileList.Count + " Training file: " + Path.GetFileName(trainingFileName));
                    ImageDescription inputImage            = ImageFileHandler.loadFromPath(trainingFileName);
                    ImageDescription inputImageGroundTruth = ImageFileHandler.loadFromPath(benchmark.getTrainingFileGroundTruth(trainingFileName));
                    inputImageGroundTruth.computeGrayscale();
                    inputImageGroundTruth = blackAndWhiteConverter.filter(inputImageGroundTruth);
                    float loss = algorithm.train(inputImage, inputImageGroundTruth);
                    totalLoss     += loss;
                    totalPassLoss += loss;
                    index++;
                    totalIndex++;

                    double timeElapsed      = (DateTime.Now - start).TotalSeconds;
                    double timeElapsedSoFar = (DateTime.Now - trainingStart).TotalSeconds;
                    double estimatedTime    = (timeElapsedSoFar / totalIndex) * (totalNumberOfFiles - totalIndex);
                    Console.WriteLine("Loss: " + loss.ToString("0.00") + " Time: " + timeElapsed.ToString("0.00") + "s Time elapsed: "
                                      + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
                }
                double trainingPassTimeElapsed = (DateTime.Now - trainingPassStart).TotalSeconds;
                Console.WriteLine("Pass took " + trainingPassTimeElapsed.ToString("0.00") + " sec. Pass loss: " + totalPassLoss.ToString("0.00")
                                  + " Avg loss: " + (totalPassLoss / (fileList.Count)).ToString("0.00"));
            }
            double totalTimeElapsed = (DateTime.Now - trainingStart).TotalSeconds;

            Console.WriteLine("Training took " + totalTimeElapsed.ToString("0.00") + " sec. Total loss: " + totalLoss.ToString("0.00")
                              + " Avg loss: " + (totalLoss / (totalNumberOfFiles)).ToString("0.00"));
        }
Esempio n. 8
0
        /// <summary>
        /// Builds this layer's contextual-memory edge-detection algorithm from the
        /// layer's configuration fields and wires up its input filter chain.
        /// </summary>
        public void initialize()
        {
            ContextualMemoryEdgeDetectionAlgorithm cmAlgorithm =
                new ContextualMemoryEdgeDetectionAlgorithm(colorChannels, longestContextLength, tableSizeBits, numberOfRays);

            // Preprocessing applied to every input image, in order: append a Canny
            // channel, append a Kirsch channel (1.4, true, 32, 0), then run a
            // 5x5 Gauss filter (sigma 1.4) that excludes the computed channels.
            ImageFilterChain preprocessing = new ImageFilterChain();
            preprocessing.addFilter(new CannyAppenderFilter());
            preprocessing.addFilter(new KirschAppenderFilter(1.4f, true, 32, 0));
            preprocessing.addFilter(new GaussFilterExcludeComputed(5, 1.4f, colorChannels));
            cmAlgorithm.inputImageFilterChain = preprocessing;

            this.algorithm = cmAlgorithm;
        }
        /// <summary>
        /// Builds a contextual-memory edge detector, trains it on the Berkeley
        /// benchmark, saves the compressed model, then reloads the saved model and
        /// runs the test and validation passes on the reloaded copy.
        /// </summary>
        /// <param name="outputModelFilename">Destination path for the compressed model.</param>
        /// <param name="relativeOutputPath">Image output folder, relative to the local benchmark path.</param>
        /// <param name="numberOfRays">Number of context rays.</param>
        /// <param name="rayLength">Longest context length per ray.</param>
        /// <param name="memoryBits">Size of the memory tables, in bits.</param>
        /// <param name="numberOfPasses">Training passes over the training set.</param>
        /// <param name="useRotations">Whether to augment the training set with rotations.</param>
        public static void trainAndSaveModel(string outputModelFilename, string relativeOutputPath,
                                             int numberOfRays = 16, int rayLength = 10, int memoryBits = 24, int numberOfPasses = 1, bool useRotations = false)
        {
            ISet <ColorChannelEnum> baseChannels = new HashSet <ColorChannelEnum> {
                ColorChannelEnum.Red, ColorChannelEnum.Green, ColorChannelEnum.Blue
            };
            ISet <ColorChannelEnum> channelsWithComputed = new HashSet <ColorChannelEnum> {
                ColorChannelEnum.Red, ColorChannelEnum.Green, ColorChannelEnum.Blue, ColorChannelEnum.Canny, ColorChannelEnum.Kirsch
            };

            // Preprocessing: append Canny and Kirsch channels, then Gauss-filter
            // (5x5, sigma 1.4) excluding the computed channels.
            ImageFilterChain preprocessing = new ImageFilterChain();
            preprocessing.addFilter(new CannyAppenderFilter());
            preprocessing.addFilter(new KirschAppenderFilter());
            preprocessing.addFilter(new GaussFilterExcludeComputed(5, 1.4f, baseChannels));

            ContextualMemoryEdgeDetectionAlgorithm trainedAlgorithm =
                new ContextualMemoryEdgeDetectionAlgorithm(channelsWithComputed, rayLength, memoryBits, numberOfRays);
            trainedAlgorithm.inputImageFilterChain = preprocessing;

            BerkeleyEdgeDetectionBenchmark benchmark = new BerkeleyEdgeDetectionBenchmark(localBenchmarkPath, Path.Combine(localBenchmarkPath, relativeOutputPath), useRotations, false);
            EdgeDetectionProcessor         processor = new EdgeDetectionProcessor(benchmark, numberOfPasses, true);

            Console.WriteLine("Training started.");
            processor.train(trainedAlgorithm);
            Console.WriteLine("Training finished.");

            // Round-trip through disk so the test pass exercises the deserialized model.
            EdgeDetectionAlgorithmUtil.saveToCompressedFile(trainedAlgorithm, outputModelFilename);
            EdgeDetectionAlgorithm reloaded = EdgeDetectionAlgorithmUtil.loadAlgorithmFromCompressedFile(outputModelFilename);

            Console.WriteLine("Testing started.");
            processor.test(reloaded);
            Console.WriteLine("Testing finished.");

            Console.WriteLine("Validation started.");
            processor.validate();
            Console.WriteLine("Validation finished.");
        }
Esempio n. 10
0
        /// <summary>
        /// Maps an <see cref="EdgeDetectionAlgorithm"/> value to its pair of
        /// convolution kernels.
        /// </summary>
        /// <param name="alg">Edge-detection operator to look up.</param>
        /// <returns>The kernel pair; the second element is null for Laplace.</returns>
        /// <exception cref="ArgumentException">Unrecognized algorithm value.</exception>
        private static (float[, ], float[, ]) GetKernels(EdgeDetectionAlgorithm alg) => alg switch
        {
            EdgeDetectionAlgorithm.Sobel   => Kernels.GetSobelKernels(),
            EdgeDetectionAlgorithm.Prewitt => Kernels.GetPrewittKernels(),
            EdgeDetectionAlgorithm.Sharr   => Kernels.GetSharrKernels(),
            // The Laplace operator uses a single convolution kernel.
            EdgeDetectionAlgorithm.Laplace => (Kernels.GetLaplaceKernel(), null),
            _ => throw new ArgumentException("Unknown edge detection algorithm"),
        };
Esempio n. 11
0
        /// <summary>
        /// Runs <paramref name="algorithm"/> over every benchmark test file, saving each
        /// output image at the benchmark's output path; when testOnTrainingFiles is set,
        /// also re-runs it over the training files into a subdirectory.
        /// </summary>
        /// <param name="algorithm">Trained algorithm to evaluate.</param>
        public void test(EdgeDetectionAlgorithm algorithm)
        {
            DateTime      testingStart = DateTime.Now;
            List <String> fileList     = benchmark.getTestFilesPathList();
            int           index        = 1;

            string outputDirectory = null;

            foreach (string testFileName in fileList)
            {
                DateTime start = DateTime.Now;
                outputDirectory = Path.GetDirectoryName(benchmark.getTestFileOutputPathWithoutExtension(testFileName));
                if (!Directory.Exists(outputDirectory))
                {
                    Directory.CreateDirectory(outputDirectory);
                }
                Console.WriteLine(index + "/" + fileList.Count + " Testing file: " + Path.GetFileName(testFileName));
                ImageDescription inputImage  = ImageFileHandler.loadFromPath(testFileName);
                ImageDescription outputImage = algorithm.test(inputImage);
                ImageFileHandler.saveToPath(outputImage, benchmark.getTestFileOutputPathWithoutExtension(testFileName), outputFileExtension);

                double timeElapsed      = (DateTime.Now - start).TotalSeconds;
                double timeElapsedSoFar = (DateTime.Now - testingStart).TotalSeconds;
                double estimatedTime    = (timeElapsedSoFar / index) * (fileList.Count - index);
                Console.WriteLine(timeElapsed.ToString("0.00") + "s Time elapsed: "
                                  + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
                index++;
            }
            double totalTimeElapsed = (DateTime.Now - testingStart).TotalSeconds;

            Console.WriteLine("Testing took " + totalTimeElapsed.ToString("0.00") + " sec.");

            if (testOnTrainingFiles)
            {
                Console.WriteLine("Testing on training files");
                testingStart = DateTime.Now;
                // BUGFIX: the original reset this to 0, printing "0/N" for the first
                // file while the test loop above counts 1-based.
                index = 1;

                // Reuse the directory of the last test output; fall back to the
                // working directory when the test list was empty (the original would
                // have thrown from Path.Combine on a null first argument).
                outputDirectory = Path.Combine(outputDirectory ?? "", trainingFilesTestOutput);
                if (!Directory.Exists(outputDirectory))
                {
                    Directory.CreateDirectory(outputDirectory);
                }

                fileList = new List <string>(benchmark.getTrainingFilesPathList());
                foreach (string trainingFileName in fileList)
                {
                    DateTime start      = DateTime.Now;
                    string   outputPath = Path.Combine(outputDirectory, Path.GetFileNameWithoutExtension(trainingFileName));
                    Console.WriteLine(index + "/" + fileList.Count + " Testing file: " + Path.GetFileName(trainingFileName));
                    ImageDescription inputImage  = ImageFileHandler.loadFromPath(trainingFileName);
                    ImageDescription outputImage = algorithm.test(inputImage);
                    ImageFileHandler.saveToPath(outputImage, outputPath, outputFileExtension);
                    index++;

                    double timeElapsed = (DateTime.Now - start).TotalSeconds;
                    Console.WriteLine(timeElapsed.ToString("0.00") + " seconds");
                }
                totalTimeElapsed = (DateTime.Now - testingStart).TotalSeconds;
                Console.WriteLine("Testing on training files took " + totalTimeElapsed.ToString("0.00") + " sec.");
            }
        }
Esempio n. 12
0
        /// <summary>
        /// Trains every layer of <paramref name="nestedAlgorithm"/> in order — each layer
        /// after the first receives the previous layer's (resized) output via the Layer
        /// channel — then trains the final image blender on the full stack of layer outputs.
        /// </summary>
        /// <param name="nestedAlgorithm">Nested algorithm whose layers and blender are trained in place.</param>
        public void trainNestedAlgorithm(ContextualMemoryNestedAlgorithm nestedAlgorithm)
        {
            // Threshold 1: any non-black ground-truth pixel counts as an edge.
            BlackAndWhiteConverter blackAndWhiteConverter = new BlackAndWhiteConverter(1);

            List <String> fileList = new List <string>(benchmark.getTrainingFilesPathList());

            List <ContextualMemoryNestedAlgorithmLayer> layers = nestedAlgorithm.getLayers();

            // ---- Stage 1: train each layer, lowest index first. ----
            for (int layerIndex = 0; layerIndex < layers.Count; layerIndex++)
            {
                ContextualMemoryNestedAlgorithmLayer layer = layers[layerIndex];
                layer.initialize();
                Console.WriteLine("Layer: " + (layerIndex + 1) + "/" + layers.Count);

                EdgeDetectionAlgorithm algorithm = layer.algorithm;

                DateTime trainingStart      = DateTime.Now;
                float    totalLoss          = 0;
                int      totalNumberOfFiles = numberOfTrainingSetPasses * fileList.Count;
                int      totalIndex         = 0;
                for (int pass = 0; pass < numberOfTrainingSetPasses; pass++)
                {
                    ListUtils.Shuffle(fileList);
                    int      index             = 1;
                    float    totalPassLoss     = 0;
                    DateTime trainingPassStart = DateTime.Now;
                    foreach (string trainingFileName in fileList)
                    {
                        DateTime start = DateTime.Now;
                        // BUGFIX: the original string literal here was corrupted ("Pass: "******"/");
                        // reconstructed as a 1-based pass counter to match the 1-based file index.
                        Console.WriteLine("Pass: " + (pass + 1) + "/" + numberOfTrainingSetPasses + ", " + index + "/" + fileList.Count + " Training file: " + Path.GetFileName(trainingFileName));

                        ImageDescription inputImage = ImageFileHandler.loadFromPath(trainingFileName);
                        int layerResizeFactor       = layer.resizeFactor;

                        // Layers after the first need the previous layer's output.
                        ImageDescription computedImage = null;
                        if (layerIndex > 0)
                        {
                            List <ImageDescription> computedImages = nestedAlgorithm.computeImageForLayers(inputImage, layerIndex);
                            computedImage = computedImages[layerIndex - 1];
                        }

                        ImageDescription inputImageGroundTruth = ImageFileHandler.loadFromPath(benchmark.getTrainingFileGroundTruth(trainingFileName));
                        inputImageGroundTruth = blackAndWhiteConverter.filter(inputImageGroundTruth);

                        ImageDescription newInputImage            = null;
                        ImageDescription newInputImageGroundTruth = null;

                        ResizeFilter resizeGrayscale = new ResizeFilter(inputImage.sizeX / layerResizeFactor, inputImage.sizeY / layerResizeFactor, ImageDescriptionUtil.grayscaleChannel);
                        ResizeFilter resizeColor     = new ResizeFilter(inputImage.sizeX / layerResizeFactor, inputImage.sizeY / layerResizeFactor, ImageDescriptionUtil.colorChannels);

                        if (layerResizeFactor == 1)
                        {
                            newInputImage            = inputImage;
                            newInputImageGroundTruth = inputImageGroundTruth;
                        }
                        else
                        {
                            newInputImage            = resizeColor.filter(inputImage);
                            newInputImageGroundTruth = resizeGrayscale.filter(inputImageGroundTruth);
                        }
                        if (layerIndex > 0)
                        {
                            // Feed the previous layer's output in as an extra grayscale channel.
                            ImageDescription resizedComputed = resizeGrayscale.filter(computedImage);
                            newInputImage.setColorChannel(ColorChannelEnum.Layer, resizedComputed.gray);
                        }

                        float loss = algorithm.train(newInputImage, newInputImageGroundTruth);

                        totalLoss     += loss;
                        totalPassLoss += loss;
                        index++;
                        totalIndex++;

                        double timeElapsed      = (DateTime.Now - start).TotalSeconds;
                        double timeElapsedSoFar = (DateTime.Now - trainingStart).TotalSeconds;
                        double estimatedTime    = (timeElapsedSoFar / totalIndex) * (totalNumberOfFiles - totalIndex);
                        Console.WriteLine("Loss: " + loss.ToString("0.00") + " Time: " + timeElapsed.ToString("0.00") + "s Time elapsed: "
                                          + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
                    }
                    double trainingPassTimeElapsed = (DateTime.Now - trainingPassStart).TotalSeconds;
                    Console.WriteLine("Pass took " + trainingPassTimeElapsed.ToString("0.00") + " sec. Pass loss: " + totalPassLoss.ToString("0.00")
                                      + " Avg loss: " + (totalPassLoss / (fileList.Count)).ToString("0.00"));
                }
                double totalTimeElapsed = (DateTime.Now - trainingStart).TotalSeconds;
                Console.WriteLine("Training took " + totalTimeElapsed.ToString("0.00") + " sec. Total loss: " + totalLoss.ToString("0.00")
                                  + " Avg loss: " + (totalLoss / (totalNumberOfFiles)).ToString("0.00"));
            }

            // ---- Stage 2: train the blender on all layers' outputs (single pass). ----
            Console.WriteLine("Training blender");

            DateTime     blenderTrainingStart      = DateTime.Now;
            float        blenderTotalLoss          = 0;
            int          blenderTotalNumberOfFiles = fileList.Count;
            int          blenderTotalIndex         = 0;
            ImageBlender blender = nestedAlgorithm.getImageBlender();
            {
                ListUtils.Shuffle(fileList);
                int index = 1;
                foreach (string trainingFileName in fileList)
                {
                    DateTime start = DateTime.Now;
                    Console.WriteLine(index + "/" + fileList.Count + " Training file: " + Path.GetFileName(trainingFileName));

                    ImageDescription        inputImage     = ImageFileHandler.loadFromPath(trainingFileName);
                    List <ImageDescription> computedImages = nestedAlgorithm.computeImageForLayers(inputImage, layers.Count);

                    ImageDescription inputImageGroundTruth = ImageFileHandler.loadFromPath(benchmark.getTrainingFileGroundTruth(trainingFileName));
                    inputImageGroundTruth = blackAndWhiteConverter.filter(inputImageGroundTruth);

                    float blenderLoss = blender.train(computedImages, inputImageGroundTruth);

                    blenderTotalLoss += blenderLoss;
                    index++;
                    blenderTotalIndex++;

                    double timeElapsed      = (DateTime.Now - start).TotalSeconds;
                    double timeElapsedSoFar = (DateTime.Now - blenderTrainingStart).TotalSeconds;
                    double estimatedTime    = (timeElapsedSoFar / blenderTotalIndex) * (blenderTotalNumberOfFiles - blenderTotalIndex);
                    Console.WriteLine("Loss: " + blenderLoss.ToString("0.00") + " Time: " + timeElapsed.ToString("0.00") + "s Time elapsed: "
                                      + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
                }
            }
            double blenderTotalTimeElapsed = (DateTime.Now - blenderTrainingStart).TotalSeconds;

            Console.WriteLine("Training took " + blenderTotalTimeElapsed.ToString("0.00") + " sec. Total loss: " + blenderTotalLoss.ToString("0.00")
                              + " Avg loss: " + (blenderTotalLoss / (blenderTotalNumberOfFiles)).ToString("0.00"));
        }
        /// <summary>
        /// Builds a four-layer nested contextual-memory algorithm (full, 1/2, 1/4 and
        /// 1/8 resolution), trains it on the Berkeley benchmark, saves the compressed
        /// model, then reloads it and runs the test pass on the reloaded copy.
        /// </summary>
        private static void trainTestAndSaveNestedAlgorithm()
        {
            string relativeImageOutputFolder = "cm m20 collisions 4layers thr1 1 2 4 8 canny kirsh nms newderivative (gauss) square l16 squaremix";
            string outputModelFilename       = relativeImageOutputFolder + ".alg";
            int    numberOfPasses            = 1;
            int?   trainingSetSizeLimit      = null;
            bool   useRotations              = false;
            bool   testOnTrainingFiles       = false;

            ISet <ColorChannelEnum> colorChannels = new HashSet <ColorChannelEnum> {
                ColorChannelEnum.Red, ColorChannelEnum.Green, ColorChannelEnum.Blue, ColorChannelEnum.Canny, ColorChannelEnum.Kirsch
            };
            ISet <ColorChannelEnum> colorAndComputedChannels = new HashSet <ColorChannelEnum> {
                ColorChannelEnum.Red, ColorChannelEnum.Green, ColorChannelEnum.Blue, ColorChannelEnum.Canny, ColorChannelEnum.Kirsch, ColorChannelEnum.Layer
            };

            // The four layers differ only in input channels, table size and resize
            // factor, so build them through one local helper.
            ContextualMemoryNestedAlgorithmLayer makeLayer(ISet <ColorChannelEnum> channels, int tableSizeBits, int resizeFactor)
            {
                ContextualMemoryNestedAlgorithmLayer layer = new ContextualMemoryNestedAlgorithmLayer();
                layer.colorChannels        = channels;
                layer.longestContextLength = 16;
                layer.numberOfRays         = 16;
                layer.tableSizeBits        = tableSizeBits;
                layer.resizeFactor         = resizeFactor;
                layer.outputResults        = true;
                return layer;
            }

            ContextualMemoryNestedAlgorithm nestedAlgorithm = new ContextualMemoryNestedAlgorithm();
            // Layer 0 runs at full resolution without the Layer feedback channel;
            // subsequent layers consume it at 1/2, 1/4 and 1/8 resolution.
            nestedAlgorithm.addLayer(makeLayer(colorChannels, 20, 1));
            nestedAlgorithm.addLayer(makeLayer(colorAndComputedChannels, 20, 2));
            nestedAlgorithm.addLayer(makeLayer(colorAndComputedChannels, 20, 4));
            nestedAlgorithm.addLayer(makeLayer(colorAndComputedChannels, 19, 8));

            // A logistic-mix blender combines the per-layer outputs into the final image.
            nestedAlgorithm.setImageBlender(new LogisticMixGrayscaleImageBlender(nestedAlgorithm.getLayers().Count));

            BerkeleyEdgeDetectionBenchmark benchmark = new BerkeleyEdgeDetectionBenchmark(localBenchmarkPath, Path.Combine(localBenchmarkPath, relativeImageOutputFolder), useRotations, false, trainingSetSizeLimit);
            EdgeDetectionProcessor         processor = new EdgeDetectionProcessor(benchmark, numberOfPasses, testOnTrainingFiles);

            Console.WriteLine("Training started.");
            processor.trainNestedAlgorithm(nestedAlgorithm);
            Console.WriteLine("Training finished.");

            EdgeDetectionAlgorithmUtil.saveToCompressedFile(nestedAlgorithm, outputModelFilename);

            // Drop the in-memory model so the test below provably runs on the
            // deserialized copy.
            nestedAlgorithm = null;

            EdgeDetectionAlgorithm algorithm = EdgeDetectionAlgorithmUtil.loadAlgorithmFromCompressedFile(outputModelFilename);

            Console.WriteLine("Testing started.");
            processor.testNestedAlgorithm((ContextualMemoryNestedAlgorithm)algorithm);
            Console.WriteLine("Testing finished.");
        }
Esempio n. 14
0
        //HSV
        #endregion

        /// <summary>
        /// Creates a 3x3 edge-detection convolution, loading the kernel pair for the
        /// chosen algorithm via GetKernels.
        /// NOTE(review): assumes GetKernels returns (vertical, horizontal) in that
        /// order — confirm against the Kernels.* implementations.
        /// </summary>
        /// <param name="alg">Which edge-detection operator's kernels to use.</param>
        public EdgeDetection(EdgeDetectionAlgorithm alg) : base(3, 3)
        {
            (KernelV, KernelH) = GetKernels(alg);
        }