/// <summary>
/// Silent and short version of the recognition workflow: recognizes every
/// user image against the template and writes one CSV per image.
/// </summary>
public static void RecognizeShort()
{
    // ---------------------------------------
    // input and output preparation
    string testFolderPath = Path.Combine(FindDataFolder(), TestDataFolderName);
    Console.WriteLine("Test folder located. Path: " + testFolderPath);
    string templatePath = Path.Combine(testFolderPath, TemplateName);

    // Create the output folder on demand.
    string outputPath = Path.Combine(testFolderPath, "Result");
    if (!Directory.Exists(outputPath))
    {
        Directory.CreateDirectory(outputPath);
    }

    // ---------------------------------------
    // actual OMR API calls
    OmrEngine engine = new OmrEngine();
    TemplateProcessor templateProcessor = engine.GetTemplateProcessor(templatePath);

    foreach (string userImage in UserImages)
    {
        string imagePath = Path.Combine(testFolderPath, userImage);
        string csvResult = templateProcessor.RecognizeImage(imagePath).GetCsv();
        File.WriteAllText(Path.Combine(outputPath, userImage + ".csv"), csvResult);
    }
}
public static void Run()
{
    // ExStart:SetElementLevelThreshold
    // The path to the documents directory.
    string documentsDir = RunExamples.GetDataDir_OCR();

    // Load the template and the image to be analyzed.
    OmrTemplate template = OmrTemplate.Load(documentsDir + "questions.amr");
    OmrImage image = OmrImage.Load(documentsDir + "answers.jpg");

    // Add a grid element (name, location, size) to the first template page.
    OmrPage firstPage = template.Pages[0];
    GridElement gridElement = new GridElement("grid1", new PointF(10, 20), new SizeF(60, 30));
    firstPage.Elements.Add(gridElement);

    // Give this element its own configuration with white-pixel trimming disabled.
    gridElement.Configuration = new OmrConfig { TrimWhitePixels = false };

    // Run recognition with the configured template.
    OmrEngine engine = new OmrEngine(template);
    OmrProcessingResult result = engine.ExtractData(new OmrImage[] { image });
    // ExEnd:SetElementLevelThreshold
}
/// <summary>
/// Recognizes each user image with a custom fill threshold and exports
/// the results as "&lt;name&gt;_Threshold.csv" files.
/// </summary>
public static void Run()
{
    // ExStart:1
    string TemplateName = @"Sheet.omr";
    string[] UserImages = new string[] { "Sheet1.jpg", "Sheet2.jpg" };
    int CustomThreshold = 40;

    // input and output preparation
    string testFolderPath = RunExamples.GetSourceDir();
    string templatePath = Path.Combine(testFolderPath, TemplateName);
    string outputPath = RunExamples.GetResultDir();

    // actual OMR API calls
    OmrEngine engine = new OmrEngine();
    TemplateProcessor templateProcessor = engine.GetTemplateProcessor(templatePath);
    Console.WriteLine("Template loaded.");

    for (int i = 0; i < UserImages.Length; i++)
    {
        string imagePath = Path.Combine(testFolderPath, UserImages[i]);
        string csvResult = templateProcessor.RecognizeImage(imagePath, CustomThreshold).GetCsv();

        // BUG FIX: compute the result path once so the logged path always matches
        // the file actually written. The original logged "..._Threashold.csv"
        // (typo) while the file was saved as "..._Threshold.csv".
        string resultPath = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(UserImages[i]) + "_Threshold.csv");
        File.WriteAllText(resultPath, csvResult);
        Console.WriteLine("Result exported. Path: " + resultPath);
    }

    Console.WriteLine("PerformOMRWithThreshold executed successfully.\n\r");
    // ExEnd:1
}
public static void Run()
{
    // ExStart:SettingMarkThreshold
    // The path to the documents directory.
    string documentsDir = RunExamples.GetDataDir_OCR();

    // Load the template and the image to be analyzed.
    OmrTemplate template = OmrTemplate.Load(documentsDir + "questions.amr");
    OmrImage image = OmrImage.Load(documentsDir + "answers.jpg");

    // Create the recognition engine and adjust its global fill threshold.
    OmrEngine engine = new OmrEngine(template);
    OmrConfig engineConfig = engine.Configuration;
    engineConfig.FillThreshold = 0.12;

    // Extract the data.
    OmrProcessingResult result = engine.ExtractData(new OmrImage[] { image });
    // ExEnd:SettingMarkThreshold
}
public static void Run()
{
    // ExStart:1
    // Input and output locations.
    string sourceDir = RunExamples.GetGenerationSourceDir();
    string resultDir = RunExamples.GetResultDir();

    // Pictures referenced by the markup file.
    string[] markupImages = { Path.Combine(sourceDir, "Aspose.jpg") };

    // Generate the template from the markup, passing the referenced images.
    OmrEngine engine = new OmrEngine();
    GenerationResult generation = engine.GenerateTemplate(Path.Combine(sourceDir, "AsposeTestWithImage.txt"), markupImages);

    // Report generation errors, if any.
    if (generation.ErrorCode != 0)
    {
        Console.WriteLine("ERROR CODE: " + generation.ErrorCode);
    }

    // Save the generation result: printable image and .omr template.
    generation.Save(resultDir, "AsposeTestWithImage");
    // ExEnd:1

    Console.WriteLine("GenerateTemplateWithImages executed successfully.\n\r");
}
/// <summary>
/// Recognizes a single form image containing a barcode element and exports
/// the extracted data as CSV.
/// </summary>
public static void Run()
{
    // ExStart:1
    string TemplateName = @"AsposeTestWithBarcode.omr";
    string UserImage = "AsposeTestWithBarcode.jpg";

    // input and output preparation
    string testFolderPath = RunExamples.GetSourceDir();
    string templatePath = Path.Combine(testFolderPath, TemplateName);
    string outputPath = RunExamples.GetResultDir();

    // actual OMR API calls
    OmrEngine engine = new OmrEngine();
    TemplateProcessor templateProcessor = engine.GetTemplateProcessor(templatePath);
    Console.WriteLine("Template loaded.");

    string imagePath = Path.Combine(testFolderPath, UserImage);
    string csvResult = templateProcessor.RecognizeImage(imagePath).GetCsv();

    // Build the result path once so the file written and the path logged
    // can never drift apart (the original computed the same path twice).
    string resultPath = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(UserImage) + ".csv");
    File.WriteAllText(resultPath, csvResult);
    Console.WriteLine("Result exported. Path: " + resultPath);

    Console.WriteLine("OMROperationWithBarcodeRecognition executed successfully.\n\r");
    // ExEnd:1
}
public static void Run()
{
    // ExStart:LoadTemplateFromUrl
    string templateUrl = "https://Github.com/asposeocr/Aspose_OCR_NET/raw/master/Examples/Data/OCR/questions.amr";
    string imageUrl = "https://Github.com/asposeocr/Aspose_OCR_NET/raw/master/Examples/Data/OCR/answers.jpg";

    // Fetch both the OMR template and the answer image directly from URLs.
    OmrTemplate remoteTemplate = OmrTemplate.LoadFromUrl(templateUrl);
    OmrImage remoteImage = OmrImage.LoadFromUrl(imageUrl);

    // Recognition then works exactly as with locally loaded files.
    OmrEngine engine = new OmrEngine(remoteTemplate);
    OmrProcessingResult result = engine.ExtractData(new OmrImage[] { remoteImage });

    // Each page's extracted data is a Hashtable; print every key/value pair.
    foreach (Hashtable pageData in result.PageData)
    {
        foreach (string key in pageData.Keys)
        {
            Console.WriteLine("[KEY] " + key + " => " + "[VALUE] " + pageData[key]);
        }
    }
    // ExEnd:LoadTemplateFromUrl
}
public static void Run()
{
    // ExStart:SetImageResolution
    // The path to the documents directory.
    string documentsDir = RunExamples.GetDataDir_OCR();

    // Load the template and the image to be analyzed.
    OmrTemplate template = OmrTemplate.Load(documentsDir + "questions.amr");
    OmrImage image = OmrImage.Load(documentsDir + "answers.jpg");

    // Override the image resolution with an explicit decimal value.
    image.Resolution = 100.0;

    // Recognize; this template has only one page.
    OmrEngine engine = new OmrEngine(template);
    OmrProcessingResult result = engine.ExtractData(new OmrImage[] { image });

    // Print every extracted key/value pair from the first (only) page.
    Hashtable pageData = result.PageData[0];
    foreach (string entryKey in pageData.Keys)
    {
        Console.WriteLine(entryKey + ": " + pageData[entryKey]);
    }
    // ExEnd:SetImageResolution
}
public static void Run()
{
    // ExStart:1
    // Input and output locations.
    string sourceDir = RunExamples.GetGenerationSourceDir();
    string resultDir = RunExamples.GetResultDir();

    // Markup files to turn into printable OMR templates.
    string[] markupFiles = new string[] { "Sheet.txt", "Grid.txt", "AsposeTest.txt" };

    // A single engine instance serves all generations.
    OmrEngine engine = new OmrEngine();

    foreach (string markupFile in markupFiles)
    {
        // Generate a template from the txt markup file.
        GenerationResult generation = engine.GenerateTemplate(Path.Combine(sourceDir, markupFile));

        // Report generation errors, if any.
        if (generation.ErrorCode != 0)
        {
            Console.WriteLine("ERROR CODE: " + generation.ErrorCode);
        }

        // Save the generated image and .omr template under the markup's base name.
        generation.Save(resultDir, Path.GetFileNameWithoutExtension(markupFile));
    }
    // ExEnd:1

    Console.WriteLine("GenerateTemplates executed successfully.\n\r");
}
/// <summary>
/// Recognizes every scanned form image in the "Scans" folder against the
/// generated template and writes the extracted data of each scan to a
/// "&lt;name&gt;_scan_results.json" file next to the application.
/// </summary>
static void ProcessScansAndExportData()
{
    // Recognition threshold passed to RecognizeImage; named instead of the
    // original magic number 28.
    const int RecognitionThreshold = 28;

    string baseDir = AppDomain.CurrentDomain.BaseDirectory;

    // Folder holding the scanned form images, and the form template path.
    // Path.Combine replaces the original string concatenation so directory
    // separators are handled correctly on every platform.
    var scannedImagePath = Path.Combine(baseDir, "Scans");
    var templatePath = Path.Combine(baseDir, "OmrOutput.omr");

    // Initialize the Aspose OMR engine and a processor for the template.
    var omrEngine = new OmrEngine();
    var templateProcessor = omrEngine.GetTemplateProcessor(templatePath);

    // Recognize every .jpg scan and export its data as JSON.
    var dirInfo = new DirectoryInfo(scannedImagePath);
    foreach (var file in dirInfo.GetFiles("*.jpg"))
    {
        string jsonResults = templateProcessor.RecognizeImage(file.FullName, RecognitionThreshold).GetJson();
        string jsonPath = Path.Combine(baseDir, Path.GetFileNameWithoutExtension(file.FullName) + "_scan_results.json");
        File.WriteAllText(jsonPath, jsonResults);
    }
}
public static void Run()
{
    // ExStart:DetectImageResolutionAutomatically
    // The path to the documents directory.
    string documentsDir = RunExamples.GetDataDir_OCR();

    // Load the template and the image to be analyzed.
    OmrTemplate template = OmrTemplate.Load(documentsDir + "questions.amr");
    OmrImage image = OmrImage.Load(documentsDir + "answers.jpg");

    // Let the engine detect the image resolution on its own.
    image.AutoDetectResolution = true;

    // Recognize; this template has only one page.
    OmrEngine engine = new OmrEngine(template);
    OmrProcessingResult result = engine.ExtractData(new OmrImage[] { image });

    // Print every extracted key/value pair from the first (only) page.
    Hashtable pageData = result.PageData[0];
    foreach (string entryKey in pageData.Keys)
    {
        Console.WriteLine(entryKey + ": " + pageData[entryKey]);
    }
    // ExEnd:DetectImageResolutionAutomatically
}
/// <summary>
/// Recognizes each image, exports CSV, then recalculates the same result
/// with a custom threshold and exports the recalculated CSV, timing both passes.
/// </summary>
public static void Run()
{
    // ExStart:1
    string TemplateName = @"Sheet.omr";
    string[] UserImages = new string[] { "Sheet1.jpg", "Sheet2.jpg" };

    string testFolderPath = RunExamples.GetSourceDir();
    string templatePath = Path.Combine(testFolderPath, TemplateName);
    string outputPath = RunExamples.GetResultDir();
    int CustomThreshold = 40;

    // init engine and get template processor
    OmrEngine engine = new OmrEngine();
    TemplateProcessor templateProcessor = engine.GetTemplateProcessor(templatePath);
    Console.WriteLine("Template loaded.");

    for (int i = 0; i < UserImages.Length; i++)
    {
        string image = UserImages[i];
        string imagePath = Path.Combine(testFolderPath, image);
        Console.WriteLine("\n\rProcessing image: " + imagePath);

        // time the initial recognition pass
        Stopwatch sw = Stopwatch.StartNew();
        RecognitionResult result = templateProcessor.RecognizeImage(imagePath);
        sw.Stop();
        Console.WriteLine("Recognition time: " + sw.Elapsed);

        // export first-pass results; the path is built once so the saved file
        // and the logged path always agree
        string resultPath = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(image) + ".csv");
        File.WriteAllText(resultPath, result.GetCsv());
        Console.WriteLine("Result Exported. Path: " + resultPath);

        // FIX: typo "Performaing" corrected in the log message; the stopwatch
        // is restarted after the console write so it times only the
        // recalculation itself, not the console I/O
        Console.WriteLine("\n\rPerforming recalculation\n\r");
        sw.Restart();
        templateProcessor.Recalculate(result, CustomThreshold);
        sw.Stop();
        Console.WriteLine("Recalculation time: " + sw.Elapsed);

        // export recalculated results
        string recalculatedPath = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(image) + "_Recalculated.csv");
        File.WriteAllText(recalculatedPath, result.GetCsv());
        Console.WriteLine("Recalculation result exported. Path: " + recalculatedPath);
    }

    Console.WriteLine("PerformOMRRecalculation executed successfully.\n\r");
    // ExEnd:1
}
/// <summary>
/// Demonstrates the full OMR workflow: recognize each image, export CSV,
/// then recalculate the same result with a custom threshold and export again.
/// </summary>
public static void Recognize()
{
    // Custom threshold used for the recalculation pass.
    int customThreshold = 40;

    string testFolderPath = Path.Combine(FindDataFolder(), TestDataFolderName);
    Console.WriteLine("Test folder located. Path: " + testFolderPath);
    string templatePath = Path.Combine(testFolderPath, TemplateName);

    // Create the output folder on demand.
    string outputPath = Path.Combine(testFolderPath, "Result");
    if (!Directory.Exists(outputPath))
    {
        Directory.CreateDirectory(outputPath);
    }

    // Initialize the engine and load the template.
    OmrEngine engine = new OmrEngine();
    TemplateProcessor templateProcessor = engine.GetTemplateProcessor(templatePath);
    Console.WriteLine("Template loaded.");

    foreach (string userImage in UserImages)
    {
        string imagePath = Path.Combine(testFolderPath, userImage);
        Console.WriteLine("Processing image: " + imagePath);

        // First pass: recognize with default settings and export CSV.
        RecognitionResult result = templateProcessor.RecognizeImage(imagePath);
        string outputName = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(userImage) + ".csv");
        File.WriteAllText(outputName, result.GetCsv());
        Console.WriteLine("Export done. Path: " + outputName);

        // Second pass: recalculate the same result with the custom threshold.
        templateProcessor.Recalculate(result, customThreshold);
        outputName = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(userImage) + "_recalculated.csv");
        File.WriteAllText(outputName, result.GetCsv());
        Console.WriteLine("Recalculated result export done. Path: " + outputName);
        Console.WriteLine();
    }
}
/// <summary>
/// Loads and displays CorrectionControl
/// </summary>
private void GetButtonClicked(object sender, RoutedEventArgs e)
{
    // Resolve the template path inside the test data folder.
    this.DataFolderPath = Path.Combine(FindDataFolder(), TestDataFolderName);
    string omrTemplatePath = Path.Combine(this.DataFolderPath, TemplateName);

    // Build a correction control from a processor bound to the template.
    OmrEngine omrEngine = new OmrEngine();
    TemplateProcessor templateProcessor = omrEngine.GetTemplateProcessor(omrTemplatePath);
    control = omrEngine.GetCorrectionControl(templateProcessor);

    // Host the control in the window and initialize it.
    CustomContentControl.Content = control;
    control.Initialize();
}
public static void Run()
{
    try
    {
        // The path to the documents directory.
        string dataDir = RunExamples.GetDataDir_OCR();

        // Load the template and the image to be analyzed.
        OmrTemplate template = OmrTemplate.Load(dataDir + "sample_1.amr");
        OmrImage image = OmrImage.Load(dataDir + "sample1.jpg");

        // NOTE: when BarCode elements are used, both the Aspose.BarCode and
        // Aspose.OCR licenses must be set, e.g.:
        //   new Aspose.BarCode.License().SetLicense(dataDir + "Aspose.Total.lic");
        //   new Aspose.OCR.License().SetLicense(dataDir + "Aspose.Total.lic");

        // Declare a barcode element with its display name, position and size,
        // then attach it to the first page of the template.
        BarcodeElement barcodeElement = new BarcodeElement("Aztec BarCode", new PointF(0, 0), new SizeF(205, 205));
        template.Pages[0].Elements.Add(barcodeElement);

        // Recognize the image against the extended template.
        OmrEngine engine = new OmrEngine(template);
        OmrProcessingResult result = engine.ExtractData(new[] { image });

        // Print every extracted key/value pair from every page.
        foreach (Hashtable page in result.PageData)
        {
            foreach (string key in page.Keys)
            {
                Console.WriteLine("key: " + key + ": " + "value: " + page[key]);
            }
        }
    }
    catch (Exception ex)
    {
        // Recognition fails without a valid Aspose license; surface the reason.
        Console.WriteLine(ex.Message);
    }
}
public static OmrEngine GetOmrEngine()
{
    // Lazily create the shared OMR engine on first use and wire up the
    // barcode and OCR engines it depends on.
    if (_omrEngine == null)
    {
        OmrEngine engine = new OmrEngine();
        engine.EnginesObject.BarcodeEngine = new Leadtools.Barcode.BarcodeEngine();
        engine.EnginesObject.OcrEngine = GetOcrEngine();
        _omrEngine = engine;
    }

    return _omrEngine;
}
public static void Run()
{
    // ExStart:DetectUsingElements
    // The path to the documents directory.
    string documentsDir = RunExamples.GetDataDir_OCR();

    // Load the template and create the recognition engine for it.
    OmrTemplate template = OmrTemplate.Load(documentsDir + "questions.amr");
    OmrEngine engine = new OmrEngine(template);

    // Replace the default preprocessing steps with a custom pipeline.
    engine.ProcessingPipeline.Clear();

    // Grayscale conversion (can be skipped when scans are already grayscale),
    // followed by Otsu's binarization algorithm.
    engine.ProcessingPipeline.Add(new GrayscaleAlgorithm());
    engine.ProcessingPipeline.Add(new OtsuBinarizationAlgorithm());
    // ExEnd:DetectUsingElements
}
static void GenerateFormTemplateAndImage()
{
    // Fully qualified path to the form markup file.
    var markupFilePath = AppDomain.CurrentDomain.BaseDirectory + "QuestionnaireMarkup.txt";

    // Generate the template and printable form image from the markup.
    var engine = new OmrEngine();
    var generation = engine.GenerateTemplate(markupFilePath);

    if (generation.ErrorCode == 0)
    {
        // Save OmrOutput.omr (template) and OmrOutput.png (form image).
        generation.Save("", "OmrOutput");
    }
    else
    {
        Console.WriteLine($"ERROR: {generation.ErrorCode} - {generation.ErrorMessage}");
    }
}
public static void Run()
{
    // ExStart:SkewedImageSecondMethod
    // The path to the documents directory.
    string documentsDir = RunExamples.GetDataDir_OCR();

    // Load the (possibly skewed) image to be analyzed.
    OmrImage skewedImage = OmrImage.Load(documentsDir + "answers.jpg");

    // An engine with an empty template suffices for the skew utilities.
    OmrEngine engine = new OmrEngine(new OmrTemplate());

    // Measure the skew, rotate the image in place to compensate, and save it.
    double skewDegree = engine.GetSkewDegree(skewedImage);
    engine.RotateImage(ref skewedImage, skewDegree);
    skewedImage.AsBitmap().Save(documentsDir + "result_out.jpg");
    // ExEnd:SkewedImageSecondMethod
}
public static void Run()
{
    // ExStart:ExtractText
    // The path to the documents directory.
    string documentsDir = RunExamples.GetDataDir_OCR();

    // Load the template and the image to be analyzed.
    OmrTemplate template = OmrTemplate.Load(documentsDir + "questions.amr");
    OmrImage image = OmrImage.Load(documentsDir + "answers.jpg");

    // Point OCR text elements at the Spanish language resources.
    TextOcrElement.Resource = documentsDir + "Aspose.OCR.Spanish.Resources.zip";

    // Declare an OCR text element (name, location in mm, size in mm)
    // and attach it to the first page of the template.
    TextOcrElement textElement = new TextOcrElement("OCR Text", new PointF(23.6f, 15.5f), new SizeF(14.6f, 4.7f));
    template.Pages[0].Elements.Add(textElement);

    // Recognize the image against the extended template.
    OmrEngine engine = new OmrEngine(template);
    OmrProcessingResult result = engine.ExtractData(new OmrImage[] { image });

    // Print every extracted key/value pair from every page.
    foreach (Hashtable page in result.PageData)
    {
        foreach (string key in page.Keys)
        {
            Console.WriteLine("key: " + key + ": " + "value: " + page[key]);
        }
    }
    // ExEnd:ExtractText
}