Example #1
 private static async Task<OcrResult> Recognize(string fileName, OcrLanguage language)
 {
     var ocrEngine = new OcrEngine(language);
     using (var stream = File.OpenRead(fileName))
     {
         var winRtStream = stream.AsRandomAccessStream();
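         // Decode the image and hand its raw pixel data to the older, pixel-buffer based
         // OcrEngine API, whose RecognizeAsync overload takes height, width and a byte array.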
         var decoder = await BitmapDecoder.CreateAsync(winRtStream);
         var bitmap = await decoder.GetPixelDataAsync();
         return await ocrEngine.RecognizeAsync(decoder.PixelHeight, decoder.PixelWidth, bitmap.DetachPixelData());
     }
 }
Example #2
        public async void TestOcr(string fileName)
        {
            var ocrEngine = new OcrEngine(OcrLanguage.English);

            using (var stream = File.OpenRead(fileName))
            {
                var winRtStream = stream.AsRandomAccessStream();
                var decoder = await BitmapDecoder.CreateAsync(winRtStream);
                var bitmap = await decoder.GetPixelDataAsync();
                var ocrResult = await ocrEngine.RecognizeAsync(decoder.PixelHeight, decoder.PixelWidth, bitmap.DetachPixelData());
                var text = string.Join(Environment.NewLine, ocrResult.Lines);
                Trace.WriteLine(text);
            }
        }
Example #3
        private static async Task <OcrResult> GetResult(string imagePath, string lang)
        {
            StorageFile storageFile;
            var         path = Path.GetFullPath(imagePath);

            storageFile = await StorageFile.GetFileFromPathAsync(path);

            IRandomAccessStream randomAccessStream = await storageFile.OpenReadAsync();

            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(randomAccessStream);

            SoftwareBitmap bitmap = await decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

            Language  language  = new Language(lang);
            OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(language);
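            // Note: TryCreateFromLanguage returns null when no OCR language pack is installed
            // for the requested language, so ocrEngine should be null-checked before use.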

            OcrResult ocrResult = await ocrEngine.RecognizeAsync(bitmap);

            Console.Write(ocrResult.Text);
            return(ocrResult);
        }
Example #4
        public async Task <bool> parseTextFromImageASync(MediaFile file)
        {
            OcrResult result;

            OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(new Language("en"));

            using (var stream = file.Source.AsRandomAccessStream())
            {
                // Create image decoder.
                var decoder = await BitmapDecoder.CreateAsync(stream);

                // Load bitmap.
                var bitmap = await decoder.GetSoftwareBitmapAsync();

                // Extract text from image.
                result = await ocrEngine.RecognizeAsync(bitmap);
            }

            this.parsedText = result.Text;

            return(true);
        }
Example #5
        async Task OcrProcessFrameAsync(
            MediaFrameReader reader,
            OcrEngine ocrEngine,
            DeviceOcrResult ocrDeviceResult)
        {
            using (var frame = reader.TryAcquireLatestFrame())
            {
                if (frame?.VideoMediaFrame != null)
                {
                    using (var bitmap = frame.VideoMediaFrame.SoftwareBitmap)
                    {
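                        // Run OCR on the latest camera frame and compare the recognized text
                        // against this.matchExpression (presumably a Regex owned by the enclosing class).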
                        var result = await ocrEngine.RecognizeAsync(bitmap);

                        if (result?.Text != null)
                        {
                            var matchingResults = this.matchExpression.Matches(result.Text);

                            var matched = matchingResults?.Count > 0;

                            if (matched)
                            {
                                // We take the first one, we don't do multiple (yet).
                                ocrDeviceResult.MatchedText = matchingResults[0].Value;
                                ocrDeviceResult.ResultType  = OcrMatchResult.Succeeded;
                                ocrDeviceResult.BestOcrSoftwareBitmap?.Dispose();
                                ocrDeviceResult.BestOcrSoftwareBitmap = null;
                            }
                            else if (result.Text.Length > ocrDeviceResult.BestOcrTextLengthFound)
                            {
                                ocrDeviceResult.BestOcrTextLengthFound = result.Text.Length;
                                ocrDeviceResult.BestOcrSoftwareBitmap?.Dispose();
                                ocrDeviceResult.BestOcrSoftwareBitmap = SoftwareBitmap.Copy(bitmap);
                                ocrDeviceResult.ResultType            = OcrMatchResult.TimedOutCloudCallAvailable;
                            }
                        }
                    }
                }
            }
        }
Example #6
        private async void buttonAddPrice_Click(object sender, RoutedEventArgs e)
        {
            CameraCaptureUI captureUI = new CameraCaptureUI();

            captureUI.PhotoSettings.Format = CameraCaptureUIPhotoFormat.Jpeg;
            //captureUI.PhotoSettings.CroppedSizeInPixels = new Size(300, 300);

            StorageFile photo = await captureUI.CaptureFileAsync(CameraCaptureUIMode.Photo);

            if (photo == null)
            {
                // User cancelled photo capture
                return;
            }

            SoftwareBitmap bitmapa = null;
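            // TryCreateFromUserProfileLanguages picks an OCR language from the user's Windows
            // language list and returns null if none of those languages has an OCR pack installed.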
            OcrEngine      silnik  = OcrEngine.TryCreateFromUserProfileLanguages();

            using (var stream = await photo.OpenAsync(Windows.Storage.FileAccessMode.Read))
            {
                var decoder = await BitmapDecoder.CreateAsync(stream);

                bitmapa = await decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            }
            OcrResult rezultat = await silnik.RecognizeAsync(bitmapa);

            try
            {
                price = ParsePrice(rezultat.Text);
                this.Frame.Navigate(typeof(AddPrice));
            }
            catch (Exception exception)
            {
                MessageDialog message = new MessageDialog(exception.ToString() + "\n Sparsowny text: " + rezultat.Text);
                await message.ShowAsync();
            }
            //textBlock.Text = rezultat.Text;
        }
Example #7
        protected virtual async Task <OcrResult?> ContainsAnyAsync(Bitmap bitmap, IEnumerable <string> lines)
        {
            this.Logger.LogInformation("Searching: " + string.Join(", ", lines));

            using (SoftwareBitmap softwareBitmap = await GetSoftwareBitmapAsync(bitmap))
            {
                OcrResult result = await ocrEngine.RecognizeAsync(softwareBitmap);

                var found = (result?.Lines ?? Enumerable.Empty <OcrLine>()).Select(x => x.Text).ToList();

                if (found.Any())
                {
                    Logger.LogInformation("Detected: " + string.Join(", ", found));
                }

                foreach (var line in lines)
                {
                    foreach (var ocrLine in result?.Lines ?? Enumerable.Empty <OcrLine>())
                    {
                        if (ocrLine.Text.Contains(line, StringComparison.OrdinalIgnoreCase))
                        {
                            Logger.LogInformation($"Success because '{line}' was found.");

                            return(result);
                        }
                    }
                }
            }

            if (Settings.CaptureSaveFailure)
            {
                string path = Path.Combine(Settings.CaptureSavePath, Guid.NewGuid().ToString() + ".bmp");
                Logger.LogDebug("saving failed ocr result to: " + path);
                bitmap.Save(path);
            }

            return(null);
        }
Example #8
        private async Task ProcessOCR(IRandomAccessStream stream, string language)
        {
            _view.ClearCanvas();
            var imageData = await ImageData.CreateFromStream(stream);

            var ocrLanguage = OcrLanguage.English;

            if (language.Equals("Swedish"))
            {
                ocrLanguage = OcrLanguage.Swedish;
            }
            if (language.Equals("Russian"))
            {
                ocrLanguage = OcrLanguage.Russian;
            }

            var ocrEngine = new OcrEngine(ocrLanguage);
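            // The legacy RecognizeAsync overload takes the oriented pixel dimensions plus the raw
            // pixel buffer; the word coordinates in the result are relative to that image.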

            var rec = await ocrEngine.RecognizeAsync(imageData.OrientedPixelHeight, imageData.OrientedPixelWidth, imageData.Pixels);

            var text = "";

            if (rec.Lines != null)
            {
                foreach (var ocrLine in rec.Lines)
                {
                    foreach (var ocrWord in ocrLine.Words)
                    {
                        text += ocrWord.Text + " ";
                        DrawRectangle(ocrWord.Left, ocrWord.Top, ocrWord.Width, ocrWord.Height, rec.TextAngle, _imageHeight, _imageWidth);
                    }
                    text += "\n";
                }
            }

            _view.SetText(text);
        }
Example #9
        public async Task <DecodeResult> DecodeAsync(SoftwareBitmap bitmap)
        {
            // The only thing we decode is OCR.
            if (!ActiveSymbologies.Contains(BarcodeSymbologies.OcrA))
            {
                return(null);
            }

            OcrResult ocrResult = await ocrEngine.RecognizeAsync(bitmap);

            // Get the text in the first non-empty line as result
            foreach (OcrLine line in ocrResult.Lines)
            {
                if (!string.IsNullOrEmpty(line.Text))
                {
                    return(new DecodeResult
                    {
                        Symbology = BarcodeSymbologies.OcrA,
                        Text = line.Text
                    });
                }
            }
            return(null);
        }
Example #10
        private static async Task <string> OCRInternal(Bitmap bmp, string languageTag)
        {
            Language language = new Language(languageTag);

            if (!OcrEngine.IsLanguageSupported(language))
            {
                throw new Exception($"{language.LanguageTag} is not supported in this system.");
            }

            OcrEngine engine = OcrEngine.TryCreateFromLanguage(language);

            using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
            {
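                // Save the GDI+ bitmap into an in-memory WinRT stream so that BitmapDecoder
                // can turn it into a SoftwareBitmap for the OCR engine.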
                bmp.Save(stream.AsStream(), ImageFormat.Bmp);
                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                using (SoftwareBitmap softwareBitmap = await decoder.GetSoftwareBitmapAsync())
                {
                    OcrResult ocrResult = await engine.RecognizeAsync(softwareBitmap);

                    return(string.Join("\r\n", ocrResult.Lines.Select(x => x.Text)));
                }
            }
        }
Example #11
        /// <summary>
        /// Image -> Text
        /// </summary>
        /// <param name="stream"></param>
        /// <returns></returns>
        private async Task ImageToText(Stream stream)
        {
                // HRESULT
            try
            {
                // Component recognition issue
                BitmapDecoder bitmapDecoder = await BitmapDecoder.CreateAsync(stream.AsRandomAccessStream());

                // Image header recognition issue
                SoftwareBitmap softwareBitmap = await bitmapDecoder.GetSoftwareBitmapAsync();

                //// Whether the image resolution is supported
                //if (m_softwareBitmap.PixelWidth > OcrEngine.MaxImageDimension || m_softwareBitmap.PixelHeight > OcrEngine.MaxImageDimension)
                //{
                //    MessageBox.Show(string.Format("Bitmap dimensions are too big for OCR!\nMax image dimension is {0}!", OcrEngine.MaxImageDimension), "Error!", MessageBoxButtons.OK, MessageBoxIcon.Error);

                //    return;
                //}

                OcrEngine ocrEngine = checkBox_Language.Checked ? OcrEngine.TryCreateFromUserProfileLanguages() : OcrEngine.TryCreateFromLanguage(m_language);
                OcrResult ocrResult = await ocrEngine.RecognizeAsync(softwareBitmap).AsTask();

                string strtmp = ocrResult.Text;

                // For Japanese, the result tends to contain spaces between individual characters
                if (m_language.LanguageTag == "ja")
                {
                    strtmp = ocrResult.Text.Replace(" ", "");
                }

                // true : first extraction
                if (m_bExtractFlag)
                {
                    m_strExtractText = strtmp;
                    m_bExtractFlag   = false;
                }

                // If there is image data but no recognition result
                if (m_bitmap != null && strtmp.Equals(""))
                {
                    label_ErrorExtract.Visible = true;
                    textBox_Extract.Text       = "";
                    m_strExtractText           = "";

                    return;
                }
                else
                {
                    label_ErrorExtract.Visible = false;
                    textBox_Extract.Text       = strtmp;

                    // Translate only when the extracted text differs from the previous data
                    if (!m_strExtractText.Equals(strtmp) || !timer_Image.Enabled)
                    {
                        await Translate();

                        m_bExtractFlag = true;
                    }
                }
            }
            catch (Exception)// ex)
            {
                textBox_Extract.Text       = "";
                label_ErrorExtract.Visible = true;
            }
        }
Example #12
        public static async Task <string> ExtractText(Bitmap bmp, System.Windows.Point?singlePoint = null)
        {
            Language? selectedLanguage = GetOCRLanguage();

            if (selectedLanguage == null)
            {
                return("");
            }

            XmlLanguage lang    = XmlLanguage.GetLanguage(selectedLanguage.LanguageTag);
            CultureInfo culture = lang.GetEquivalentCulture();

            bool scaleBMP = true;
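            // Upscaling the capture (1.5x) tends to help OCR on small text, but it is skipped for
            // single-point lookups and when the result would exceed OcrEngine.MaxImageDimension.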

            if (singlePoint != null ||
                bmp.Width * 1.5 > OcrEngine.MaxImageDimension)
            {
                scaleBMP = false;
            }

            Bitmap scaledBitmap;

            if (scaleBMP)
            {
                scaledBitmap = ScaleBitmapUniform(bmp, 1.5);
            }
            else
            {
                scaledBitmap = ScaleBitmapUniform(bmp, 1.0);
            }

            StringBuilder text = new();

            await using (MemoryStream memory = new())
            {
                scaledBitmap.Save(memory, ImageFormat.Bmp);
                memory.Position = 0;
                BitmapDecoder bmpDecoder = await BitmapDecoder.CreateAsync(memory.AsRandomAccessStream());

                SoftwareBitmap softwareBmp = await bmpDecoder.GetSoftwareBitmapAsync();

                OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(selectedLanguage);
                OcrResult ocrResult = await ocrEngine.RecognizeAsync(softwareBmp);

                if (singlePoint == null)
                {
                    foreach (OcrLine line in ocrResult.Lines)
                    {
                        text.AppendLine(line.Text);
                    }
                }
                else
                {
                    Windows.Foundation.Point fPoint = new Windows.Foundation.Point(singlePoint.Value.X, singlePoint.Value.Y);
                    foreach (OcrLine ocrLine in ocrResult.Lines)
                    {
                        foreach (OcrWord ocrWord in ocrLine.Words)
                        {
                            if (ocrWord.BoundingRect.Contains(fPoint))
                            {
                                _ = text.Append(ocrWord.Text);
                            }
                        }
                    }
                }
            }
            if (culture.TextInfo.IsRightToLeft)
            {
                string[] textListLines = text.ToString().Split(new char[] { '\n', '\r' });

                _ = text.Clear();
                foreach (string textLine in textListLines)
                {
                    List <string> wordArray = textLine.Split().ToList();
                    wordArray.Reverse();
                    _ = text.Append(string.Join(' ', wordArray));

                    if (textLine.Length > 0)
                    {
                        _ = text.Append('\n');
                    }
                }
                return(text.ToString());
            }
            else
            {
                return(text.ToString());
            }
        }
Example #13
        public async Task <Tuple <string, int> > PerformOcr(SoftwareBitmap bitmap, int confidenceThreshold, bool isOneThirdCapture, int oneThirdTopHeight)
        {
            string returnValue     = "";
            int    confidenceValue = 0;
            int    confidence      = 100; //100 is max, start decreasing

            if (_isPerformingOcr)
            {
                //skip if still processing previous
                return(new Tuple <string, int>("", 0));
            }

            _isPerformingOcr = true;

            Log.Information($"OCR PerformOcr _isPerformingOcr {_isPerformingOcr}");

            // If this is the first iteration, init all settings; otherwise, if the loop for the same
            // sentence has reached >= 5 iterations (~seconds), speak it out now so it does not take too long.
            if (_cachedResultSetLoop > 4 && _cachedResultSet != null && !_cachedResultSet.SpokenOut)
            {
                Log.Information($"OCR _cachedResultSetLoop > 5 - assign before continuing");
                int maxConf            = _cachedResultSet.Collection.Max(x => x.Confidence);
                DetectedOcrString item = _cachedResultSet.Collection.FirstOrDefault(x => x.Confidence == maxConf);

                if (item != null)
                {
                    Log.Information($"OCR _cachedResultSetLoop > 5 - assign before continuing confidence {item.Confidence}");

                    _cachedResultSet.SpokenOut = true;
                    returnValue     = item.Sentence;
                    confidenceValue = item.Confidence;
                }
            }

            OcrResult ocrResult = await _ocrEngine.RecognizeAsync(bitmap);

            if (ocrResult == null)
            {
                _isPerformingOcr = false;
                Log.Information($"OCR null.");

                // Take care of synthesizing the current cached result set, if it contains anything
                if (_cachedResultSet.Collection.Count > 0)
                {
                    //select the most confident and set back to 0
                    int maxConf            = _cachedResultSet.Collection.Max(x => x.Confidence);
                    DetectedOcrString item = _cachedResultSet.Collection.FirstOrDefault(x => x.Confidence == maxConf);

                    if (item != null)
                    {
                        returnValue     = item.Sentence;
                        confidenceValue = item.Confidence;
                    }
                    //reset for loop
                    _cachedResultSet     = new CachedOcrResultSet();
                    _cachedResultSetLoop = 0;
                }

                return(new Tuple <string, int>(returnValue, confidenceValue));
            }

            var resultTextString    = "";
            var ocrResultTextString = "";
            int totalWords          = 0;

            if (ocrResult.Lines.Count > 0)
            {
                Log.Information($"OcrDetection - Raw text: {ocrResult.Text}");
                double left     = bitmap.PixelWidth;  //X
                double top      = bitmap.PixelHeight; //Y
                Rect   tempRect = new Rect();
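                // Accumulate one bounding rectangle over all recognized words and concatenate
                // their text into a single candidate string.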

                foreach (var word in ocrResult.Lines.SelectMany(x => x.Words))
                {
                    left = (word.BoundingRect.Left < left) ? word.BoundingRect.Left : left;
                    top  = (word.BoundingRect.Top < top) ? word.BoundingRect.Top : top;
                    if (totalWords == 0)
                    {
                        tempRect = new Rect(word.BoundingRect.Left, word.BoundingRect.Top, word.BoundingRect.Width, word.BoundingRect.Height);
                    }

                    ocrResultTextString += word.Text + " ";
                    totalWords++;
                    tempRect.Union(word.BoundingRect);
                }

                Rect boundingRectangle = new Rect(left, top, tempRect.Width, tempRect.Height);

                if (ocrResultTextString.Length < 3)
                {
                    confidence       = 50;
                    _isPerformingOcr = false;
                    return(new Tuple <string, int>("", confidence));
                }

                // If more than 2 lines, lower the confidence (only applicable when reading subtitles)
                if (ocrResult.Lines.Count > 2)
                {
                    confidence -= ocrResult.Lines.Count > 3 ? 10 : 5;
                }

                bool angleTooOff = false;
                if (ocrResult.TextAngle != null)
                {
                    //not holding camera horizontally, let's skip these words
                    angleTooOff = ocrResult.TextAngle < -3 || ocrResult.TextAngle > 3 ? true : false;
                    if (angleTooOff)
                    {
                        //skip everything and return from the function
                        _isPerformingOcr = false;
                        return(new Tuple <string, int>("", confidence));
                    }
                }

                Tuple <string, int> res = OptimizeResult(ocrResultTextString, confidence, totalWords);
                resultTextString = res.Item1;
                confidence       = res.Item2;
                Log.Debug($"OcrDetection - OCR optimized: {resultTextString} Confidence: {confidence}");


                //get line height - discard when not checking for subtitles
                if (isOneThirdCapture)
                {
                    int    maxLineHeight     = (oneThirdTopHeight / 2) - 10;
                    double largestLineHeight = ocrResult.Lines.Max(w => w.Words.Max(l => l.BoundingRect.Height));
                    if (largestLineHeight > maxLineHeight)
                    {
                        //lower confidence
                        confidence -= 15;
                        Log.Debug($"OCR line height largest: {largestLineHeight} / max {maxLineHeight}, new confidence {confidence}");
                    }
                }

                // Check that OCR is not giving results outside the preferred top zone of the frame (remove this check to support non-subtitle zones)
                if (boundingRectangle.Y + boundingRectangle.Height > oneThirdTopHeight)
                {
                    //lower confidence
                    confidence -= 10;
                    Log.Debug($"OCR rect is outside of top third (third value: {oneThirdTopHeight}) / y+height: {boundingRectangle.Y + boundingRectangle.Height}, new confidence {confidence}");
                }

                if (resultTextString.Trim().Length <= 4)
                {
                    Log.Debug($"OcrDetection - OCR optimized: {resultTextString}, Confidence: {confidence} - skipping length < 4 chars");
                    //skip everything and continue with next frame as if nothing
                    _isPerformingOcr = false;
                    return(new Tuple <string, int>("", confidence));
                }

                if (confidence > confidenceThreshold)
                {
                    int    difference = Helpers.TextHelpers.DamerauLevenshteinDistance(resultTextString, _previousOcrResultString);
                    double compare    = Helpers.TextHelpers.CompareStrings(resultTextString, _previousOcrResultString);
                    Log.Information($"OcrDetection previous difference: {difference} / compare percent: {compare}");

                    DetectedOcrString ocrItem = new DetectedOcrString()
                    {
                        BoundingBox = boundingRectangle, Confidence = confidence, Sentence = resultTextString, WordCount = totalWords
                    };

                    if (_cachedResultSet.Collection.Count < 1)
                    {
                        _cachedResultSet.Collection.Add(ocrItem);
                        _cachedResultSetLoop += 1;
                        Log.Debug($"OcrDetection - _cachedResultSetLoop <1, : {_cachedResultSetLoop} ");
                    }
                    else
                    {
                        bool similarBoundingRectangle = IsBoundingRectangleSimilar(boundingRectangle, _cachedResultSet.Collection[_cachedResultSet.Collection.Count - 1].BoundingBox);   //true if 3 points match out of four top,left,right,bottom
                        bool identicalLeftOrRight     = IsPositionLeftRightIdentical(boundingRectangle, _cachedResultSet.Collection[_cachedResultSet.Collection.Count - 1].BoundingBox); //true if either side is equal

                        //if similar stuff add to collection
                        if (difference < 10 || compare > 0.8 || (similarBoundingRectangle && compare > 0.5) || (identicalLeftOrRight && compare > 0.5))
                        {
                            //add to list in any case
                            _cachedResultSet.Collection.Add(ocrItem);
                            _cachedResultSetLoop += 1;
                            Log.Information($"OcrDetection - similar, add to existing - _cachedResultSetLoop {_cachedResultSetLoop} ");
                        }
                        else
                        {
                            //first check if results are not similar
                            if (_previousOrcResult != null && WordsPositionsSimilarity(ocrResult, _previousOrcResult))
                            {
                                //add to list in any case but lower score
                                if (_previousOcrResultString.Length > resultTextString.Length && _previousOcrResultString.Length - resultTextString.Length > 5)
                                {
                                    ocrItem.Confidence -= 5;
                                    Log.Debug($"OcrDetection - WordsPositionsSimilarity lower confidence by -5 ");
                                }
                                _cachedResultSet.Collection.Add(ocrItem);
                                _cachedResultSetLoop += 1;
                                Log.Debug($"OcrDetection - WordsPositionsSimilarity add to existing - _cachedResultSetLoop {_cachedResultSetLoop} ");
                            }
                            else
                            {
                                //else speak out previous and reset with new
                                //(if not already spoken - if larger than 4th iteration it was already spoken out)
                                if (!_cachedResultSet.SpokenOut)
                                {
                                    int maxConf            = _cachedResultSet.Collection.Max(x => x.Confidence);
                                    DetectedOcrString item = _cachedResultSet.Collection.FirstOrDefault(x => x.Confidence == maxConf);

                                    if (item != null)
                                    {
                                        _cachedResultSet.SpokenOut = true;
                                        returnValue     = item.Sentence;
                                        confidenceValue = item.Confidence;
                                    }
                                }

                                //reset for loop
                                _cachedResultSet = new CachedOcrResultSet();
                                _cachedResultSet.Collection.Add(ocrItem);
                                _cachedResultSetLoop = 1;
                                Log.Debug($"OcrDetection - not similar, reset to 1 - _cachedResultSetLoop {_cachedResultSetLoop} ");
                            }
                        }
                    }

                    _previousOrcResult       = ocrResult;
                    _previousOcrResultString = resultTextString;
                }
            }
            else
            {
                if (_cachedResultSet.Collection.Count > 0)
                {
                    DetectedOcrString ocrItem = _cachedResultSet.Collection[_cachedResultSet.Collection.Count - 1];
                    _cachedResultSet.Collection.Add(ocrItem);
                    _cachedResultSetLoop += 1;
                    Log.Debug($"OcrDetection - no OCR lines, keeping last to loop, : {_cachedResultSetLoop} ");
                }
            }

            _isPerformingOcr = false;
            return(new Tuple <string, int>(returnValue, confidenceValue));
        }
Example #14
        protected async override void OnLaunched(LaunchActivatedEventArgs e)
        {
            List<MyLine> listLines = new List<MyLine>();

            //int minSize = 40;
            //int maxSize = 2600;
            int wordTopRange = 25;

            StorageFolder folder = Windows.Storage.ApplicationData.Current.LocalFolder;
            StorageFile file = null;

            if (!string.IsNullOrEmpty(e.Arguments))
            {
                try
                {
                    OcrEngine ocrEngine = new OcrEngine(OcrLanguage.English);

                    file = await folder.GetFileAsync(e.Arguments);
                    ImageProperties imgProp = await file.Properties.GetImagePropertiesAsync();

                    //if (imgProp.Height < minSize || imgProp.Height > maxSize || imgProp.Width < minSize || imgProp.Width > maxSize)
                    //{
                    //    await WriteToFile(folder, file.Name + ".txt", "Image size must be > 40 and < 2600 pixel");
                    //}
                    //else
                    //{
                    WriteableBitmap bitmap = null;

                    using (IRandomAccessStream imgStream = await file.OpenAsync(FileAccessMode.Read))
                    {
                        bitmap = new WriteableBitmap((int)imgProp.Width, (int)imgProp.Height);
                        bitmap.SetSource(imgStream);
                    }

                    // This is the main API call to extract text from the image.
                    OcrResult ocrResult = await ocrEngine.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());

                    // If there is text. 
                    if (ocrResult.Lines != null)
                    {
                        StringBuilder builder = new StringBuilder();

                        // loop over recognized text.
                        foreach (OcrLine line in ocrResult.Lines)
                        {
                            // Iterate over words in line.
                            foreach (OcrWord word in line.Words)
                            {
                                // Group the words into lines by vertical position
                                bool isBelongToLine = false;
                                foreach (MyLine myLine in listLines)
                                {
                                    // If a matching line already exists, add the word to it
                                    if (Between(myLine.lineNumber, word.Top - wordTopRange, word.Top + wordTopRange, true))
                                    {
                                        myLine.listWords.Add(word);
                                        isBelongToLine = true;
                                        break;
                                    }
                                }

                                // If no matching line exists, create a new line and add the word to it
                                if (isBelongToLine == false)
                                {
                                    MyLine myLine = new MyLine();
                                    myLine.lineNumber = word.Top;
                                    myLine.listWords.Add(word);
                                    listLines.Add(myLine);
                                }
                                // sort the word line by line
                            }
                        }

                        // Sort the lines based on top position
                        listLines.Sort();

                        // return data line by line
                        foreach (MyLine myLine in listLines)
                        {
                            builder.Append(myLine.ToString());
                        }


                        await WriteToFile(folder, file.Name + ".txt", builder.ToString());
                    }
                    else // if no text
                    {
                        await WriteToFile(folder, file.Name + ".txt", "No Text");
                    }

                    //}

                }
                catch (Exception ex)
                {
                    await WriteToFile(folder, file.Name + ".txt", "Exception");
                }

                App.Current.Exit();
            }
        }
Example #15
        private async void takePicture()
        {
            //Get information about the preview.
            bool foundLicensePlate = false;
            bool foundValidPermit  = false;
            var  previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            int  videoFrameWidth   = (int)previewProperties.Width;
            int  videoFrameHeight  = (int)previewProperties.Height;

            // In portrait modes, the width and height must be swapped for the VideoFrame to have the correct aspect ratio and avoid letterboxing / black bars.
            if (!externalCamera && (displayInformation.CurrentOrientation == DisplayOrientations.Portrait || displayInformation.CurrentOrientation == DisplayOrientations.PortraitFlipped))
            {
                videoFrameWidth  = (int)previewProperties.Height;
                videoFrameHeight = (int)previewProperties.Width;
            }

            // Create the video frame to request a SoftwareBitmap preview frame.
            var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, videoFrameWidth, videoFrameHeight);

            // Capture the preview frame.
            using (var currentFrame = await mediaCapture.GetPreviewFrameAsync(videoFrame))
            {
                // Collect the resulting frame.
                SoftwareBitmap bitmap = currentFrame.SoftwareBitmap;

                OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(ocrLanguage);

                if (ocrEngine == null)
                {
                    rootPage.NotifyUser(ocrLanguage.DisplayName + " is not supported.", NotifyType.ErrorMessage);

                    return;
                }

                var imgSource = new WriteableBitmap(bitmap.PixelWidth, bitmap.PixelHeight);
                bitmap.CopyToBuffer(imgSource.PixelBuffer);
                PreviewImage.Source = imgSource;

                var ocrResult = await ocrEngine.RecognizeAsync(bitmap);

                // Used for text overlay.
                // Prepare scale transform for words since image is not displayed in original format.
                var scaleTrasform = new ScaleTransform
                {
                    CenterX = 0,
                    CenterY = 0,
                    ScaleX  = PreviewControl.ActualWidth / bitmap.PixelWidth,
                    ScaleY  = PreviewControl.ActualHeight / bitmap.PixelHeight
                };

                if (ocrResult.TextAngle != null)
                {
                    // If text is detected at an angle, this sample overlays word boxes on the
                    // original image, so the overlay boxes are rotated to match.
                    TextOverlay.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                // Iterate over recognized lines of text.
                foreach (var line in ocrResult.Lines)
                {
                    // Iterate over words in line.
                    foreach (var word in line.Words)
                    {
                        Debug.WriteLine(word.Text);
                        //harrison start
                        if (word.Text.Equals("Parking"))
                        {
                            foundValidPermit = true;
                        }
                        if (word.Text.Equals("ONTARIO"))
                        {
                            foundLicensePlate = true;
                        }
                        //harrison end
                        // Define the TextBlock.
                        var wordTextBlock = new TextBlock()
                        {
                            Text  = word.Text,
                            Style = (Style)this.Resources["ExtractedWordTextStyle"]
                        };

                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                        // Keep references to word boxes.
                        wordBoxes.Add(wordBoxOverlay);

                        // Define position, background, etc.
                        var overlay = new Border()
                        {
                            Child = wordTextBlock,
                            Style = (Style)this.Resources["HighlightedWordBoxHorizontalLine"]
                        };

                        // Bind word boxes to UI.
                        overlay.SetBinding(Border.MarginProperty, wordBoxOverlay.CreateWordPositionBinding());
                        overlay.SetBinding(Border.WidthProperty, wordBoxOverlay.CreateWordWidthBinding());
                        overlay.SetBinding(Border.HeightProperty, wordBoxOverlay.CreateWordHeightBinding());

                        // Put the filled textblock in the results grid.
                        TextOverlay.Children.Add(overlay);
                    }
                }

                rootPage.NotifyUser("Image processed using " + ocrEngine.RecognizerLanguage.DisplayName + " language.", NotifyType.StatusMessage);
            }

            UpdateWordBoxTransform();


            //harrison start
            if (foundValidPermit && foundLicensePlate)
            {
                showValidPermit();
            }

            else if (foundLicensePlate)
            {
                showNoValidPermit();
            }
            //PreviewControl.Visibility = Visibility.Collapsed;
            //Image.Visibility = Visibility.Visible;
            //harrison end

            //ExtractButton.Visibility = Visibility.Collapsed;
            //CameraButton.Visibility = Visibility.Visible;
        }
Example #16
        private async void OCR()
        {
            if (bitmap.PixelWidth > OcrEngine.MaxImageDimension || bitmap.PixelHeight > OcrEngine.MaxImageDimension)
            {
                BitmapLang.ocrEngine      = null;
                BitmapLang.Name           = null;
                BitmapLang.softwareBitmap = null;
                Windows.UI.Popups.MessageDialog msg = new Windows.UI.Popups.MessageDialog("Wymiary tego zdjęcia są zbyt duże, użyj mniejszego.");
                await msg.ShowAsync();

                this.Frame.Navigate(typeof(MainPage));
                // Bail out here; otherwise RecognizeAsync below would be called with an oversized bitmap.
                return;
            }


            if (ocrEngine != null)
            {
                // Recognize text from the bitmap
                var ocrResult = await ocrEngine.RecognizeAsync(bitmap);

                if (ocrResult == null || ocrResult.Text == "")
                {
                    BitmapLang.ocrEngine      = null;
                    BitmapLang.Name           = null;
                    BitmapLang.softwareBitmap = null;
                    Windows.UI.Popups.MessageDialog msg = new Windows.UI.Popups.MessageDialog("Nasz algorytm uznał, że nie ma na tym zdjęciu żadnego tekstu.");
                    await msg.ShowAsync();

                    this.Frame.Navigate(typeof(MainPage));
                }

                Windows.Storage.StorageFolder localFolder = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFolderAsync("OCR_UKW_Project", Windows.Storage.CreationCollisionOption.OpenIfExists);

                DataFile = await localFolder.CreateFileAsync("OCR_UKW_Project_UserData.txt", CreationCollisionOption.OpenIfExists);

                string tmpData = "";
                try { tmpData = await FileIO.ReadTextAsync(DataFile); }
                catch { tmpData = ""; }
                tmpData = tmpData + DateTimeOffset.Now + BitmapLang.Name + "\n";
                await FileIO.WriteTextAsync(DataFile, tmpData);

                // Display the recognized text
                ExtractedTextBox.Text = ocrResult.Text;

                if (ocrResult.TextAngle != null)
                {
                    // If the text is at an angle, rotate the grid
                    TextOverlay.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                // Create highlights for the recognized words
                foreach (var line in ocrResult.Lines)
                {
                    Rect lineRect = Rect.Empty;
                    foreach (var word in line.Words)
                    {
                        lineRect.Union(word.BoundingRect);
                    }

                    // Determine whether the line is vertical or horizontal (vertical e.g. for Chinese).
                    bool isVerticalLine = lineRect.Height > lineRect.Width;

                    foreach (var word in line.Words)
                    {
                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                        // Add to the list
                        wordBoxes.Add(wordBoxOverlay);

                        // What type of text
                        var overlay = new Border()
                        {
                            Style = isVerticalLine ?
                                    (Style)this.Resources["HighlightedWordBoxVerticalLine"] :
                                    (Style)this.Resources["HighlightedWordBoxHorizontalLine"]
                        };

                        // Bind the word box to the border
                        overlay.SetBinding(Border.MarginProperty, wordBoxOverlay.CreateWordPositionBinding());
                        overlay.SetBinding(Border.WidthProperty, wordBoxOverlay.CreateWordWidthBinding());
                        overlay.SetBinding(Border.HeightProperty, wordBoxOverlay.CreateWordHeightBinding());

                        // Add the box to the grid
                        TextOverlay.Children.Add(overlay);
                    }
                }

                // Rescale word boxes to match current UI size.
                UpdateWordBoxTransform();
            }
            else
            {
                Windows.UI.Popups.MessageDialog msg = new Windows.UI.Popups.MessageDialog("Wybrany język nie jest dostępny. Spróbuj ponownie.");
                await msg.ShowAsync();
            }
        }
Example #17
        protected async override void OnLaunched(LaunchActivatedEventArgs e)
        {
            StorageFolder folder = Windows.Storage.ApplicationData.Current.LocalFolder;
            if (!string.IsNullOrEmpty(e.Arguments))
            {
                try
                {
                    Debug.WriteLine("args: " + e.Arguments);
                    var ocrEngine = new OcrEngine(OcrLanguage.English);
                    var file = await folder.GetFileAsync(e.Arguments);
                    ImageProperties imgProp = await file.Properties.GetImagePropertiesAsync();
                    WriteableBitmap bitmap;
                    using (var imgStream = await file.OpenAsync(FileAccessMode.Read))
                    {
                        bitmap = new WriteableBitmap((int)imgProp.Width, (int)imgProp.Height);
                        bitmap.SetSource(imgStream);
                    }
                    // Check whether the loaded image is supported for processing.
                    // Supported image dimensions are between 40 and 2600 pixels.
                    if (bitmap.PixelHeight < 40 ||
                        bitmap.PixelHeight > 2600 ||
                        bitmap.PixelWidth < 40 ||
                        bitmap.PixelWidth > 2600)
                    {
                        //write invalid image to output

                        return;
                    }

                    // This is the main API call to extract text from the image.
                    var ocrResult = await ocrEngine.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());

                    // If the OCR result contains any lines, some text was recognized.
                    if (ocrResult.Lines != null)
                    {
                        bool hasVerticalLines = false;
                        JsonObject jsonOjbect = new JsonObject();
                        jsonOjbect.Add("text_angle", JsonValue.CreateNumberValue(ocrResult.TextAngle.HasValue ? ocrResult.TextAngle.Value : 0d));
                        string extractedText = "";

                        JsonArray wordsArray = new JsonArray();
                        jsonOjbect.Add("words", wordsArray);

                        // Iterate over recognized lines of text.
                        foreach (var line in ocrResult.Lines)
                        {
                            hasVerticalLines |= line.IsVertical;
                            JsonArray lineArray = new JsonArray();
                            foreach (var word in line.Words)
                            {
                                JsonObject wordJson = new JsonObject();
                                wordsArray.Add(wordJson);
                                wordJson.Add("top", JsonValue.CreateNumberValue(word.Top));
                                wordJson.Add("left", JsonValue.CreateNumberValue(word.Left));
                                wordJson.Add("width", JsonValue.CreateNumberValue(word.Width));
                                wordJson.Add("height", JsonValue.CreateNumberValue(word.Height));
                                wordJson.Add("text", JsonValue.CreateStringValue(word.Text));

                                extractedText += word.Text + " ";
                            }
                            // The words were already appended above; just terminate the line.
                            extractedText += '\n';
                        }
                        jsonOjbect.Add("text", JsonValue.CreateStringValue(extractedText));
                        jsonOjbect.Add("has_vertical_line", JsonValue.CreateBooleanValue(hasVerticalLines));
                        await WriteToFile(folder, file.Name + ".txt", jsonOjbect.Stringify());
                    }
                    else
                    {

                        await WriteToFile(folder, "failed.txt", "No Text");
                    }

                }
                catch (Exception ex)
                {
                    await WriteToFile(folder, "failed.txt", ex.Message + "\r\n"+ex.StackTrace);
                }
                App.Current.Exit();
            }
        }
Example #18
        /// <summary>
        /// This is click handler for Extract Text button.
        /// If image size is supported text is extracted and overlaid over displayed image.
        /// Supported image dimensions are between 40 and 2600 pixels.
        /// </summary>
        private async void ExtractText_Click(object sender, RoutedEventArgs e)
        {
            // Prevent another OCR request, since only one image can be processed at a time by a given OCR engine instance.
            ExtractTextButton.IsEnabled = false;

            // Check whether the loaded image is supported for processing.
            // Supported image dimensions are between 40 and 2600 pixels.
            if (bitmap.PixelHeight < 40 ||
                bitmap.PixelHeight > 2600 ||
                bitmap.PixelWidth < 40 ||
                bitmap.PixelWidth > 2600)
            {
                ImageText.Text = "Image size is not supported." +
                                 Environment.NewLine +
                                 "Loaded image size is " + bitmap.PixelWidth + "x" + bitmap.PixelHeight + "." +
                                 Environment.NewLine +
                                 "Supported image dimensions are between 40 and 2600 pixels.";
                ImageText.Style = (Style)Application.Current.Resources["RedTextStyle"];

                rootPage.NotifyUser(
                    String.Format("OCR was attempted on image with unsupported size. " +
                                  Environment.NewLine +
                                  "Supported image dimensions are between 40 and 2600 pixels."),
                    NotifyType.ErrorMessage);

                return;
            }

            // This is the main API call to extract text from the image.
            var ocrResult = await ocrEngine.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());

            // If the OCR result contains any lines, some text was recognized.
            if (ocrResult.Lines != null)
            {
                // Used for text overlay.
                // Prepare scale transform for words since image is not displayed in original format.
                var scaleTrasform = new ScaleTransform
                {
                    CenterX = 0,
                    CenterY = 0,
                    ScaleX  = PreviewImage.ActualWidth / bitmap.PixelWidth,
                    ScaleY  = PreviewImage.ActualHeight / bitmap.PixelHeight,
                };

                if (ocrResult.TextAngle != null)
                {
                    // If text is detected at an angle, apply a rotate
                    // transform to the image around its center.
                    PreviewImage.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                string extractedText = "";

                // Iterate over recognized lines of text.
                foreach (var line in ocrResult.Lines)
                {
                    // Iterate over words in line.
                    foreach (var word in line.Words)
                    {
                        var originalRect = new Rect(word.Left, word.Top, word.Width, word.Height);
                        var overlayRect  = scaleTrasform.TransformBounds(originalRect);

                        // Define the TextBlock.
                        var wordTextBlock = new TextBlock()
                        {
                            Height   = overlayRect.Height,
                            Width    = overlayRect.Width,
                            FontSize = overlayRect.Height * 0.8,
                            Text     = word.Text,
                            Style    = (Style)Application.Current.Resources["ExtractedWordTextStyle"]
                        };

                        // Define position, background, etc.
                        var border = new Border()
                        {
                            Margin = new Thickness(overlayRect.Left, overlayRect.Top, 0, 0),
                            Height = overlayRect.Height,
                            Width  = overlayRect.Width,
                            Child  = wordTextBlock,
                            Style  = (Style)Application.Current.Resources["ExtractedWordBorderStyle"]
                        };

                        // Put the filled textblock in the results grid.
                        TextOverlay.Children.Add(border);

                        extractedText += word.Text + " ";
                    }
                    extractedText += Environment.NewLine;
                }

                ImageText.Text  = extractedText;
                ImageText.Style = (Style)Application.Current.Resources["GreenTextStyle"];
            }
            else
            {
                ImageText.Text  = "No text.";
                ImageText.Style = (Style)Application.Current.Resources["RedTextStyle"];
            }

            rootPage.NotifyUser(
                String.Format("Image successfully processed in {0} language.", ocrEngine.Language.ToString()),
                NotifyType.StatusMessage);
        }
Example #19
        private async Task <string> getCheckCode()
        {
            HttpWebRequest request = (HttpWebRequest)System.Net.WebRequest.Create("http://jwbinfosys.zju.edu.cn/CheckCode.aspx");

            request.Method          = "GET";
            request.CookieContainer = cookieContainer;
            HttpWebResponse response;

            try
            {
                response = (HttpWebResponse)await request.GetResponseAsync();
            }
            catch
            {
                return(null);
            }

            //HttpWebResponse response = (HttpWebResponse)await request.GetResponseAsync();
            // Save the cookie
            cookieContainer.Add(new Uri("http://jwbinfosys.zju.edu.cn/CheckCode.aspx"), response.Cookies);

            // Get the CheckCode.gif and resize it, since OCR requires image dimensions greater than 40x40
            StorageFile checkCodeImage = await ApplicationData.Current.LocalFolder.CreateFileAsync("checkCode.gif", CreationCollisionOption.ReplaceExisting);

            byte[]          pixels;
            BitmapTransform transform = new BitmapTransform();

            transform.ScaledWidth  = 120; //the length and width were doubled.
            transform.ScaledHeight = 44;
            //start
            using (Stream webResponseStream = response.GetResponseStream())
                using (var responseStream = await checkCodeImage.OpenStreamForWriteAsync())
                {
                    await webResponseStream.CopyToAsync(responseStream);
                }
            using (var responseStream = await checkCodeImage.OpenAsync(FileAccessMode.ReadWrite))
            {
                var decoder = await BitmapDecoder.CreateAsync(responseStream);

                PixelDataProvider pix = await decoder.GetPixelDataAsync(
                    BitmapPixelFormat.Bgra8,
                    BitmapAlphaMode.Straight,
                    transform,
                    ExifOrientationMode.IgnoreExifOrientation,
                    ColorManagementMode.ColorManageToSRgb);

                pixels = pix.DetachPixelData();
            }

            var checkCode = new WriteableBitmap(120, 44);

            using (Stream stream = checkCode.PixelBuffer.AsStream())
            {
                await stream.WriteAsync(pixels, 0, pixels.Length);
            }

            // Recognize the check code with OCR
            OcrEngine ocrEngine = new OcrEngine(OcrLanguage.English);
            var       ocrResult = await ocrEngine.RecognizeAsync(44, 120, checkCode.PixelBuffer.ToArray());

            if (ocrResult.Lines != null)
            {
                return(ocrResult.Lines[0].Words[0].Text);
            }
            return(null);
        }
Example #20
        public async Task <string> ExtractText(Bitmap bmp, string languageCode, System.Windows.Point?singlePoint = null)
        {
            Language        selectedLanguage = new Language(languageCode);
            List <Language> possibleOCRLangs = OcrEngine.AvailableRecognizerLanguages.ToList();

            if (possibleOCRLangs.Count < 1)
            {
                throw new ArgumentOutOfRangeException($"No possible OCR languages are installed.");
            }
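            // If the requested language has no installed OCR pack, fall back to an installed
            // language with the same abbreviated name, or failing that, the first available one.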

            if (possibleOCRLangs.Where(l => l.LanguageTag == selectedLanguage.LanguageTag).Count() < 1)
            {
                List <Language> similarLanguages = possibleOCRLangs.Where(la => la.AbbreviatedName == selectedLanguage.AbbreviatedName).ToList();
                if (similarLanguages.Count() > 0)
                {
                    selectedLanguage = similarLanguages.FirstOrDefault();
                }
                else
                {
                    selectedLanguage = possibleOCRLangs.FirstOrDefault();
                }
            }

            bool scaleBMP = true;

            if (singlePoint != null ||
                bmp.Width * 1.5 > OcrEngine.MaxImageDimension)
            {
                scaleBMP = false;
            }

            Bitmap scaledBitmap;

            if (scaleBMP)
            {
                scaledBitmap = ScaleBitmapUniform(bmp, 1.5);
            }
            else
            {
                scaledBitmap = ScaleBitmapUniform(bmp, 1.0);
            }

            StringBuilder text = new StringBuilder();

            XmlLanguage lang    = XmlLanguage.GetLanguage(languageCode);
            CultureInfo culture = lang.GetEquivalentCulture();

            await using (MemoryStream memory = new MemoryStream())
            {
                scaledBitmap.Save(memory, ImageFormat.Bmp);
                memory.Position = 0;
                BitmapDecoder bmpDecoder = await BitmapDecoder.CreateAsync(memory.AsRandomAccessStream());

                Windows.Graphics.Imaging.SoftwareBitmap softwareBmp = await bmpDecoder.GetSoftwareBitmapAsync();

                OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(selectedLanguage);
                OcrResult ocrResult = await ocrEngine.RecognizeAsync(softwareBmp);

                if (singlePoint == null)
                {
                    foreach (OcrLine line in ocrResult.Lines)
                    {
                        text.AppendLine(line.Text);
                    }
                }
                else
                {
                    Windows.Foundation.Point fPoint = new Windows.Foundation.Point(singlePoint.Value.X, singlePoint.Value.Y);
                    foreach (OcrLine ocrLine in ocrResult.Lines)
                    {
                        foreach (OcrWord ocrWord in ocrLine.Words)
                        {
                            if (ocrWord.BoundingRect.Contains(fPoint))
                            {
                                text.Append(ocrWord.Text);
                            }
                        }
                    }
                }
            }
            if (culture.TextInfo.IsRightToLeft)
            {
                List <string> textListLines = text.ToString().Split(new char[] { '\n', '\r' }).ToList();

                text.Clear();
                foreach (string textLine in textListLines)
                {
                    List <string> wordArray = textLine.Split().ToList();
                    wordArray.Reverse();
                    text.Append(string.Join(' ', wordArray));

                    if (textLine.Length > 0)
                    {
                        text.Append('\n');
                    }
                }
                return(text.ToString());
            }
            else
            {
                return(text.ToString());
            }
        }
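
The fallback logic above works against OcrEngine.AvailableRecognizerLanguages. A short sketch, assuming System.Linq and System.Diagnostics are available, of how a caller might list the installed OCR languages and pick a valid BCP-47 tag before calling ExtractText; the helper name is hypothetical:

        private static Language PickOcrLanguage(string preferredTag)
        {
            // List what is installed, then prefer an exact tag match and otherwise
            // fall back to the first available OCR language (null if none are installed).
            IReadOnlyList<Language> installed = OcrEngine.AvailableRecognizerLanguages;

            foreach (Language language in installed)
            {
                Debug.WriteLine($"{language.LanguageTag} - {language.DisplayName}");
            }

            return installed.FirstOrDefault(l => l.LanguageTag == preferredTag) ?? installed.FirstOrDefault();
        }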
Beispiel #21
0
        private async Task<string> getCheckCode()
        {            
            HttpWebRequest request = (HttpWebRequest)System.Net.WebRequest.Create("http://jwbinfosys.zju.edu.cn/CheckCode.aspx");
            request.Method = "GET";
            request.CookieContainer =cookieContainer;
            HttpWebResponse response;
            try
            {
                response = (HttpWebResponse)await request.GetResponseAsync();
            }
            catch
            {               
                return null;
            }

            //HttpWebResponse response = (HttpWebResponse)await request.GetResponseAsync();
            // Save the cookies returned by the response.
            cookieContainer.Add(new Uri("http://jwbinfosys.zju.edu.cn/CheckCode.aspx"), response.Cookies);

            // Get checkCode.gif and resize it; the OCR library requires an image size of at least 40x40 pixels.
            StorageFile checkCodeImage = await ApplicationData.Current.LocalFolder.CreateFileAsync("checkCode.gif", CreationCollisionOption.ReplaceExisting);

            byte[] pixels;
            BitmapTransform transform = new BitmapTransform();
            transform.ScaledWidth = 120; // The width and height are doubled to meet the minimum size.
            transform.ScaledHeight = 44;
            // Download the check code image to a local file, then decode and rescale it.
            using (Stream webResponseStream = response.GetResponseStream())
            using (var responseStream = await checkCodeImage.OpenStreamForWriteAsync())
            {
                await webResponseStream.CopyToAsync(responseStream);
            }
            using (var responseStream = await checkCodeImage.OpenAsync(FileAccessMode.ReadWrite))
            {
                var decoder = await BitmapDecoder.CreateAsync(responseStream);
                PixelDataProvider pix = await decoder.GetPixelDataAsync(
                    BitmapPixelFormat.Bgra8,
                    BitmapAlphaMode.Straight,
                    transform,
                    ExifOrientationMode.IgnoreExifOrientation,
                    ColorManagementMode.ColorManageToSRgb);
                pixels = pix.DetachPixelData();
            }

            var checkCode = new WriteableBitmap(120, 44);
            using (Stream stream = checkCode.PixelBuffer.AsStream())
            {
                await stream.WriteAsync(pixels, 0, pixels.Length);
            }

            // Recognize the check code with OCR.
            OcrEngine ocrEngine = new OcrEngine(OcrLanguage.English);
            var ocrResult = await ocrEngine.RecognizeAsync(44, 120, checkCode.PixelBuffer.ToArray());

            if (ocrResult.Lines != null)
            {               
                return ocrResult.Lines[0].Words[0].Text;
            }          
            return null;
        }
Beispiel #22
0
        private static async Task<string> GetString(List<WriteableBitmap> lstbitmap,OcrEngine ocrEngine)
        {
            string extractedText = "";
            foreach (WriteableBitmap bitmap in lstbitmap)
                {
                    if (bitmap.PixelHeight < 40 ||
                        bitmap.PixelHeight > 2600 ||
                        bitmap.PixelWidth < 40 ||
                        bitmap.PixelWidth > 2600)
                    {
                        extractedText = Environment.NewLine + "Image size is not supported." +
                                    Environment.NewLine +
                                    "Loaded image size is " + bitmap.PixelWidth + "x" + bitmap.PixelHeight + "." +
                                    Environment.NewLine +
                                    "Supported image dimensions are between 40 and 2600 pixels.";
                        //ImageText.Style = (Style)Application.Current.Resources["RedTextStyle"];
                        return extractedText;
                    }


                    try
                    {
                        // This is the main API call to extract text from the image.
                        var ocrResult = await ocrEngine.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());

                        // If the OCR result does not contain any lines, no text was recognized.
                        if (ocrResult.Lines != null)
                        {
                            // Iterate over recognized lines of text.
                            foreach (var line in ocrResult.Lines)
                            {
                                // Iterate over words in line.
                                foreach (var word in line.Words)
                                {
                                    var originalRect = new Rect(word.Left, word.Top, word.Width, word.Height);
                                    if (ocrEngine.Language != OcrLanguage.ChineseSimplified)
                                        extractedText += word.Text + " ";
                                    else
                                        extractedText += word.Text;
                                }
                                extractedText += Environment.NewLine;
                            }


                        }
                        // No lines were recognized for this bitmap; nothing to append.
                    }
                    catch (Exception)
                    {
                        // Recognition failed for this bitmap; skip it and continue with the next one.
                    }


                }

            return extractedText;
        }
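
A hypothetical call site for the helper above, assuming the legacy WindowsPreview.Media.Ocr engine shown elsewhere in these examples and a list of page bitmaps that has already been loaded; the method and variable names are mine:

        private static async Task DumpTextAsync(List<WriteableBitmap> pages)
        {
            // GetString handles the per-bitmap size checks and recognition errors itself.
            var ocrEngine = new OcrEngine(OcrLanguage.English);
            string extractedText = await GetString(pages, ocrEngine);
            Debug.WriteLine(extractedText);
        }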
Beispiel #23
0
        private async void ExtractText_Click(object sender, RoutedEventArgs e)
        {
            //// Prevent another OCR request, since only one image can be processed at a time by the same OCR engine instance.
            //ExtractTextButton.IsEnabled = false;

            // Check whether the loaded image is supported for processing.
            // Supported image dimensions are between 40 and 2600 pixels.
            if (bitmap.PixelHeight < 40 ||
                bitmap.PixelHeight > 2600 ||
                bitmap.PixelWidth < 40 ||
                bitmap.PixelWidth > 2600)
            {
                ImageText.Text = "Image size is not supported." +
                                 Environment.NewLine +
                                 "Loaded image size is " + bitmap.PixelWidth + "x" + bitmap.PixelHeight + "." +
                                 Environment.NewLine +
                                 "Supported image dimensions are between 40 and 2600 pixels.";
                //ImageText.Style = (Style)Application.Current.Resources["RedTextStyle"];

                return;
            }

            // This is the main API call to extract text from the image.
            var ocrResult = await ocrEngine.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());

            // If the OCR result does not contain any lines, no text was recognized.
            if (ocrResult.Lines != null)
            {
                // Used for text overlay.
                // Prepare scale transform for words since image is not displayed in original format.
                var scaleTrasform = new ScaleTransform
                {
                    CenterX = 0,
                    CenterY = 0,
                    ScaleX  = PreviewImage.ActualWidth / bitmap.PixelWidth,
                    ScaleY  = PreviewImage.ActualHeight / bitmap.PixelHeight,
                };

                if (ocrResult.TextAngle != null)
                {
                    PreviewImage.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                string extractedText = "";

                // Iterate over recognized lines of text.
                foreach (var line in ocrResult.Lines)
                {
                    // Iterate over words in line.
                    foreach (var word in line.Words)
                    {
                        var originalRect = new Rect(word.Left, word.Top, word.Width, word.Height);
                        var overlayRect  = scaleTrasform.TransformBounds(originalRect);

                        var wordTextBlock = new TextBlock()
                        {
                            Height   = overlayRect.Height,
                            Width    = overlayRect.Width,
                            FontSize = overlayRect.Height * 0.8,
                            Text     = word.Text,
                        };

                        // Define position, background, etc.
                        var border = new Border()
                        {
                            Margin              = new Thickness(overlayRect.Left, overlayRect.Top, 0, 0),
                            Height              = overlayRect.Height,
                            Width               = overlayRect.Width,
                            Background          = new SolidColorBrush(Colors.Orange),
                            Opacity             = 0.5,
                            HorizontalAlignment = HorizontalAlignment.Left,
                            VerticalAlignment   = VerticalAlignment.Top,
                            Child               = wordTextBlock,
                        };
                        OverlayTextButton.IsEnabled = true;
                        // Put the filled textblock in the results grid.
                        TextOverlay.Children.Add(border);
                        extractedText += word.Text + " ";
                    }
                    extractedText += Environment.NewLine;
                }

                ImageText.Text = extractedText;
            }
            else
            {
                ImageText.Text = "No text.";
            }
        }
Beispiel #24
0
        private async void RenderPage(PdfPage pdfPage)
        {
            Stopwatch sw = new Stopwatch();

            sw.Start();

            canvas.Children.Clear();

            await pdfPage.PreparePageAsync();

            StorageFolder tempFolder = ApplicationData.Current.TemporaryFolder;
            StorageFile   jpgFile    = await tempFolder.CreateFileAsync(Guid.NewGuid().ToString() + ".png", CreationCollisionOption.GenerateUniqueName);

            PdfPageRenderOptions renderOptions = new PdfPageRenderOptions();

            renderOptions.DestinationHeight = (uint)(pdfPage.Size.Height * 2.0);
            renderOptions.DestinationWidth  = (uint)(pdfPage.Size.Width * 2.0);

            canvas.Width  = renderOptions.DestinationWidth;
            canvas.Height = renderOptions.DestinationHeight;

            if (jpgFile != null)
            {
                IRandomAccessStream randomStream = await jpgFile.OpenAsync(FileAccessMode.ReadWrite);

                await pdfPage.RenderToStreamAsync(randomStream, renderOptions);

                await randomStream.FlushAsync();

                randomStream.Dispose();
                pdfPage.Dispose();
                //await DisplayImageFileAsync(jpgFile);
            }

            SoftwareBitmap softwareBitmap;
            BitmapImage    image           = new BitmapImage();
            ImageBrush     backgroundBrush = new ImageBrush();

            using (IRandomAccessStream stream = await jpgFile.OpenAsync(FileAccessMode.Read))
            {
                // Create the decoder from the stream
                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                image.SetSource(stream);
                backgroundBrush.ImageSource = image;

                // Get the SoftwareBitmap representation of the file
                softwareBitmap = await decoder.GetSoftwareBitmapAsync();
            }

            OcrEngine engine = OcrEngine.TryCreateFromUserProfileLanguages();
            var       result = await engine.RecognizeAsync(softwareBitmap);

            textBox.Text = "";
            var left = canvas.GetValue(Canvas.LeftProperty);
            var top  = canvas.GetValue(Canvas.TopProperty);

            foreach (OcrLine line in result.Lines)
            {
                Rectangle lineRect = new Rectangle();

                textBox.Text += line.Text + "\r\n";
                foreach (OcrWord word in line.Words)
                {
                    Rectangle r = new Rectangle();
                    r.Margin = new Thickness(word.BoundingRect.Left, word.BoundingRect.Top, 0.0, 0.0);
                    r.Width  = word.BoundingRect.Width;
                    r.Height = word.BoundingRect.Height;
                    r.Stroke = new SolidColorBrush(Colors.Blue);
                    canvas.Children.Add(r);
                    canvas.Background = backgroundBrush;
                }
            }

            // Look for the line containing "Customer Name".
            for (int i = 0; i < result.Lines.Count; i++)
            {
                OcrLine line = result.Lines[i];

                if (line.Text.Contains("Customer Name"))
                {
                    clientName.Text = line.Text.Substring(13);
                    //MessageDialog dlg = new MessageDialog(clientName.Text);
                    //await dlg.ShowAsync();
                }
            }

            sw.Stop();
            MessageDialog md = new MessageDialog(string.Format("Page processed in {0} milliseconds", sw.ElapsedMilliseconds));
            await md.ShowAsync();
        }
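
RenderPage expects a PdfPage; a minimal sketch of how one could be obtained with Windows.Data.Pdf, assuming the caller already has a StorageFile pointing at a PDF (the method name is hypothetical):

        private async Task OcrFirstPageAsync(StorageFile pdfFile)
        {
            PdfDocument document = await PdfDocument.LoadFromFileAsync(pdfFile);

            if (document.PageCount > 0)
            {
                // RenderPage above renders the page to an image, runs OCR on it and disposes the page.
                PdfPage page = document.GetPage(0);
                RenderPage(page);
            }
        }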
Beispiel #25
0
        public async Task RecognizeImage()
        {
            // Should be "Recog image".

            // The image must be converted to grayscale before text can be extracted.
            //WriteableBitmap temp = ImageBehavior.GrayScale(this.Image);
            WriteableBitmap temp = ImageBehavior.GrayScale(this.RecognizedImage);

            this.StandardImageForRecognize();

            // Start the text recognition.
            // The pixel width/height must be between 40 and 2600.
            OcrResult result = null;

            try
            {
                result = await _ocrEngine.RecognizeAsync((uint)temp.PixelHeight,
                                                         (uint)temp.PixelWidth,
                                                         temp.PixelBuffer.ToArray());
            }
            catch (Exception msg)
            {
                Debug.WriteLine(msg);
                return;
            }

            try
            {
                if (result.Lines != null)
                {
                    foreach (var item in result.Lines)
                    {
                        if (item.Words == null)
                        {
                            continue;
                        }

                        foreach (var word in item.Words)
                        {
                            if (word.Text == null)
                            {
                                continue;
                            }

                            Rect bound = new Rect()
                            {
                                X      = word.Left,
                                Y      = word.Top,
                                Width  = word.Width,
                                Height = word.Height
                            };

                            CharacterRecognizeModel.PairWords.Add(
                                new KeyValuePair <string, Rect>(word.Text, bound));
#if DEBUG
                            Debug.WriteLine(word.Text);
#endif
                            // WriteableBitmap.DrawRectangle is an extension method from the WriteableBitmapExtension class of the WriteableEx library.

                            this.RecognizedImage.DrawRectangle(
                                (int)bound.Left,
                                (int)bound.Top,
                                (int)bound.Right,
                                (int)bound.Bottom,
                                Windows.UI.Color.FromArgb(255, 110, 210, 255));
                        }
                    }
                }
            }
            catch (Exception msg)
            {
                // An exception is thrown when the image cannot be recognized, so catch it here to avoid breaking.
                Debug.WriteLine(msg);
            }
        }
Beispiel #26
0
        private async void ProcessFrame(ThreadPoolTimer timer)
        {
            if (!semaphore.Wait(0))
            {
                return;
            }
            else
            {
                if (_mediaCapture != null)
                {
                    if (_mediaCapture.CameraStreamState == CameraStreamState.Streaming)
                    {
                        var previewProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
                        int videoFrameWidth   = (int)previewProperties.Width;
                        int videoFrameHeight  = (int)previewProperties.Height;

                        // In portrait modes, the width and height must be swapped for the VideoFrame to have the correct aspect ratio and avoid letterboxing / black bars.
                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        {
                            if ((_displayInformation.CurrentOrientation == DisplayOrientations.Portrait || _displayInformation.CurrentOrientation == DisplayOrientations.PortraitFlipped))
                            {
                                videoFrameWidth  = (int)previewProperties.Height;
                                videoFrameHeight = (int)previewProperties.Width;
                            }
                        });

                        if (_mediaCapture != null)
                        {
                            // Create the video frame to request a SoftwareBitmap preview frame.
                            var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, videoFrameWidth, videoFrameHeight);
                            if (_mediaCapture.CameraStreamState == CameraStreamState.Streaming)
                            {
                                using (VideoFrame vf = await _mediaCapture.GetPreviewFrameAsync(videoFrame))
                                {
                                    SoftwareBitmap bitmap = vf.SoftwareBitmap;

                                    OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(new Windows.Globalization.Language("en-US"));

                                    if (ocrEngine != null)
                                    {
                                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                                        {
                                            var imgSource = new WriteableBitmap(bitmap.PixelWidth, bitmap.PixelHeight);
                                            bitmap.CopyToBuffer(imgSource.PixelBuffer);
                                            PreviewImage.Source = imgSource;
                                            await Task.CompletedTask;
                                        });

                                        bool success   = false;
                                        var  ocrResult = await ocrEngine.RecognizeAsync(bitmap);

                                        // Used for text overlay.
                                        // Prepare scale transform for words since image is not displayed in original format.
                                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                                        {
                                            var scaleTrasform = new ScaleTransform
                                            {
                                                CenterX = 0,
                                                CenterY = 0,
                                                ScaleX  = PreviewControl.ActualWidth / bitmap.PixelWidth,
                                                ScaleY  = PreviewControl.ActualHeight / bitmap.PixelHeight
                                            };

                                            if (ocrResult.TextAngle != null)
                                            {
                                                // If text is detected under some angle in this sample scenario we want to
                                                // overlay word boxes over original image, so we rotate overlay boxes.
                                                TextOverlay.RenderTransform = new RotateTransform
                                                {
                                                    Angle   = (double)ocrResult.TextAngle,
                                                    CenterX = PreviewImage.ActualWidth / 2,
                                                    CenterY = PreviewImage.ActualHeight / 2
                                                };
                                            }

                                            if (ocrResult.Lines != null && ocrResult.Lines.Count >= 1)
                                            {
                                                List <int> lstWordCount = new List <int>();
                                                // Iterate over recognized lines of text.
                                                foreach (var line in ocrResult.Lines)
                                                {
                                                    lstWordCount.Add(line.Words.Count);
                                                    // Iterate over words in line.
                                                    foreach (var word in line.Words)
                                                    {
                                                        // Define the TextBlock.
                                                        var wordTextBlock = new TextBlock()
                                                        {
                                                            Text  = word.Text,
                                                            Style = (Style)this.Resources["ExtractedWordTextStyle"]
                                                        };

                                                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                                                        // Keep references to word boxes.
                                                        wordBoxes.Add(wordBoxOverlay);

                                                        // Define position, background, etc.
                                                        var overlay = new Border()
                                                        {
                                                            Child = wordTextBlock,
                                                            Style = (Style)this.Resources["HighlightedWordBoxHorizontalLine"]
                                                        };

                                                        // Bind word boxes to UI.
                                                        overlay.SetBinding(Border.MarginProperty, wordBoxOverlay.CreateWordPositionBinding());
                                                        overlay.SetBinding(Border.WidthProperty, wordBoxOverlay.CreateWordWidthBinding());
                                                        overlay.SetBinding(Border.HeightProperty, wordBoxOverlay.CreateWordHeightBinding());

                                                        // Put the filled textblock in the results grid.
                                                        TextOverlay.Children.Add(overlay);
                                                    }
                                                }

                                                if (isAutoFocusCapable && isAutoShot && lstWordCount.Max() >= 2)
                                                {
                                                    success = true;
                                                    await Task.CompletedTask;
                                                }
                                                else
                                                {
                                                    success = false;
                                                    await Task.CompletedTask;
                                                }
                                            }
                                            else
                                            {
                                                errorMessage.Text = "VIZZoneInFront".GetLocalized();
                                                await Task.CompletedTask;
                                            }
                                        });

                                        if (success)
                                        {
                                            string s = await Dispatcher.RunTaskAsync <string>(MakePhoto);

                                            await Task.CompletedTask;
                                        }
                                    }
                                    else
                                    {
                                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                                        {
                                            errorMessage.Text = "OCRLanguage".GetLocalized();
                                            await Task.CompletedTask;
                                        });

                                        await Task.CompletedTask;
                                    }
                                }
                            }
                        }
                    }
                }
                semaphore.Release();
            }
        }
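
ProcessFrame has the TimerElapsedHandler signature, so it is normally driven by a periodic Windows.System.Threading.ThreadPoolTimer; a minimal sketch under that assumption (the field, the method names and the 500 ms interval are mine), with the semaphore above preventing overlapping runs:

        private ThreadPoolTimer _frameTimer;

        private void StartOcrTimer()
        {
            // Run OCR on a preview frame roughly twice per second.
            _frameTimer = ThreadPoolTimer.CreatePeriodicTimer(ProcessFrame, TimeSpan.FromMilliseconds(500));
        }

        private void StopOcrTimer()
        {
            _frameTimer?.Cancel();
            _frameTimer = null;
        }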
Beispiel #27
0
        /// <summary>
        ///     Attempts to determine the <see cref="BitmapRotation"/> needed to fix the orientation of the given
        ///     <paramref name="imageFile"/>.
        /// </summary>
        public async Task <BitmapRotation> TryGetRecommendedRotationAsync(StorageFile imageFile, ImageScannerFormat format)
        {
            try
            {
                if (OcrEngine != null)
                {
                    // get separate stream
                    using (IRandomAccessStream sourceStream = await imageFile.OpenAsync(FileAccessMode.Read))
                    {
                        BitmapDecoder decoder = await BitmapDecoder.CreateAsync(sourceStream);

                        Tuple <BitmapRotation, int> bestRotation;

                        // create rotated 0°
                        SoftwareBitmap bitmap = await decoder.GetSoftwareBitmapAsync();

                        OcrResult ocrResult = await OcrEngine.RecognizeAsync(bitmap);

                        bestRotation = new Tuple <BitmapRotation, int>(BitmapRotation.None, ocrResult.Text.Length);

                        using (InMemoryRandomAccessStream targetStream = new InMemoryRandomAccessStream())
                        {
                            // create rotated 90°
                            BitmapEncoder encoder = await HelperService.CreateOptimizedBitmapEncoderAsync(format, targetStream);

                            encoder.SetSoftwareBitmap(bitmap);
                            encoder.BitmapTransform.Rotation = BitmapRotation.Clockwise90Degrees;
                            await encoder.FlushAsync();

                            decoder = await BitmapDecoder.CreateAsync(targetStream);

                            bitmap = await decoder.GetSoftwareBitmapAsync();

                            ocrResult = await OcrEngine.RecognizeAsync(bitmap);

                            if (ocrResult.Text.Length > bestRotation.Item2)
                            {
                                bestRotation = new Tuple <BitmapRotation, int>(BitmapRotation.Clockwise90Degrees, ocrResult.Text.Length);
                            }
                        }

                        using (InMemoryRandomAccessStream targetStream = new InMemoryRandomAccessStream())
                        {
                            // create rotated 180°
                            BitmapEncoder encoder = await HelperService.CreateOptimizedBitmapEncoderAsync(format, targetStream);

                            encoder.SetSoftwareBitmap(bitmap);
                            encoder.BitmapTransform.Rotation = BitmapRotation.Clockwise90Degrees;
                            await encoder.FlushAsync();

                            decoder = await BitmapDecoder.CreateAsync(targetStream);

                            bitmap = await decoder.GetSoftwareBitmapAsync();

                            ocrResult = await OcrEngine.RecognizeAsync(bitmap);

                            if (ocrResult.Text.Length > bestRotation.Item2)
                            {
                                bestRotation = new Tuple <BitmapRotation, int>(BitmapRotation.Clockwise180Degrees, ocrResult.Text.Length);
                            }
                        }

                        using (InMemoryRandomAccessStream targetStream = new InMemoryRandomAccessStream())
                        {
                            // create rotated 270°
                            BitmapEncoder encoder = await HelperService.CreateOptimizedBitmapEncoderAsync(format, targetStream);

                            encoder.SetSoftwareBitmap(bitmap);
                            encoder.BitmapTransform.Rotation = BitmapRotation.Clockwise90Degrees;
                            await encoder.FlushAsync();

                            decoder = await BitmapDecoder.CreateAsync(targetStream);

                            bitmap = await decoder.GetSoftwareBitmapAsync();

                            ocrResult = await OcrEngine.RecognizeAsync(bitmap);

                            if (ocrResult.Text.Length > bestRotation.Item2)
                            {
                                bestRotation = new Tuple <BitmapRotation, int>(BitmapRotation.Clockwise270Degrees, ocrResult.Text.Length);
                            }
                        }

                        bitmap.Dispose();

                        if (bestRotation.Item2 < MinimumNumberOfWords)
                        {
                            // very low confidence, could just be random patterns
                            return(BitmapRotation.None);
                        }
                        else
                        {
                            return(bestRotation.Item1);
                        }
                    }
                }
                else
                {
                    return(BitmapRotation.None);
                }
            }
            catch (Exception exc)
            {
                LogService?.Log.Error(exc, "Determining the recommended rotation failed.");
                AppCenterService?.TrackError(exc);
                return(BitmapRotation.None);
            }
        }
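
Hypothetical usage of TryGetRecommendedRotationAsync (the wrapper name and the file parameter are assumptions): run the detection for a scanned JPEG and log whether a rotation is suggested; applying it is then a matter of setting BitmapTransform.Rotation on an encoder, as the method itself does for its probe images.

        private async Task<BitmapRotation> CheckScanOrientationAsync(StorageFile scanFile)
        {
            // Returns BitmapRotation.None when OCR is unavailable or no rotation scores better.
            BitmapRotation rotation = await TryGetRecommendedRotationAsync(scanFile, ImageScannerFormat.Jpeg);
            Debug.WriteLine($"Recommended rotation: {rotation}");
            return rotation;
        }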
Beispiel #28
0
        /// <summary>
        /// This is event handler for 'Extract' button.
        /// Recognizes text from image and displays it.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private async void ExtractButton_Click(object sender, RoutedEventArgs e)
        {
            ClearResults();

            // Check if OcrEngine supports image resolution.
            if (bitmap.PixelWidth > OcrEngine.MaxImageDimension || bitmap.PixelHeight > OcrEngine.MaxImageDimension)
            {
                rootPage.NotifyUser(
                    String.Format("Bitmap dimensions ({0}x{1}) are too big for OCR.", bitmap.PixelWidth, bitmap.PixelHeight) +
                    "Max image dimension is " + OcrEngine.MaxImageDimension + ".",
                    NotifyType.ErrorMessage);

                return;
            }

            OcrEngine ocrEngine = null;

            if (UserLanguageToggle.IsOn)
            {
                // Try to create OcrEngine for first supported language from UserProfile.GlobalizationPreferences.Languages list.
                // If none of the languages are available on device, method returns null.
                ocrEngine = OcrEngine.TryCreateFromUserProfileLanguages();
            }
            else
            {
                // Try to create OcrEngine for specified language.
                // If language is not supported on device, method returns null.
                ocrEngine = OcrEngine.TryCreateFromLanguage(LanguageList.SelectedValue as Language);
            }

            if (ocrEngine != null)
            {
                // Recognize text from image.
                var ocrResult = await ocrEngine.RecognizeAsync(bitmap);

                // Display recognized text.
                ExtractedTextBox.Text = ocrResult.Text;

                if (ocrResult.TextAngle != null)
                {
                    // If text is detected under some angle in this sample scenario we want to
                    // overlay word boxes over original image, so we rotate overlay boxes.
                    TextOverlay.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                // Create overlay boxes over recognized words.
                foreach (var line in ocrResult.Lines)
                {
                    // Determine if line is horizontal or vertical.
                    // Vertical lines are supported only in Chinese Traditional and Japanese languages.
                    Rect lineRect = Rect.Empty;
                    foreach (var word in line.Words)
                    {
                        lineRect.Union(word.BoundingRect);
                    }
                    bool isVerticalLine = lineRect.Height > lineRect.Width;
                    var  style          = isVerticalLine ? HighlightedWordBoxVerticalLineStyle : HighlightedWordBoxHorizontalLineStyle;

                    foreach (var word in line.Words)
                    {
                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                        // Keep references to word boxes.
                        wordBoxes.Add(wordBoxOverlay);

                        // Create a box to highlight the word.
                        TextOverlay.Children.Add(wordBoxOverlay.CreateBorder(style));
                    }
                }

                // Rescale word boxes to match current UI size.
                UpdateWordBoxTransform();

                rootPage.NotifyUser(
                    "Image is OCRed for " + ocrEngine.RecognizerLanguage.DisplayName + " language.",
                    NotifyType.StatusMessage);
            }
            else
            {
                rootPage.NotifyUser("Selected language is not available.", NotifyType.ErrorMessage);
            }
        }
Beispiel #29
0
        protected async override void OnLaunched(LaunchActivatedEventArgs e)
        {
            StorageFolder folder = Windows.Storage.ApplicationData.Current.LocalFolder;

            if (!string.IsNullOrEmpty(e.Arguments))
            {
                try
                {
                    Debug.WriteLine("args: " + e.Arguments);
                    var ocrEngine = new OcrEngine(OcrLanguage.English);
                    var file      = await folder.GetFileAsync(e.Arguments);

                    ImageProperties imgProp = await file.Properties.GetImagePropertiesAsync();

                    WriteableBitmap bitmap;
                    using (var imgStream = await file.OpenAsync(FileAccessMode.Read))
                    {
                        bitmap = new WriteableBitmap((int)imgProp.Width, (int)imgProp.Height);
                        bitmap.SetSource(imgStream);
                    }
                    // Check whether the loaded image is supported for processing.
                    // Supported image dimensions are between 40 and 2600 pixels.
                    if (bitmap.PixelHeight < 40 ||
                        bitmap.PixelHeight > 2600 ||
                        bitmap.PixelWidth < 40 ||
                        bitmap.PixelWidth > 2600)
                    {
                        //write invalid image to output

                        return;
                    }

                    // This is the main API call to extract text from the image.
                    var ocrResult = await ocrEngine.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());

                    // If the OCR result does not contain any lines, no text was recognized.
                    if (ocrResult.Lines != null)
                    {
                        bool       hasVerticalLines = false;
                        JsonObject jsonOjbect       = new JsonObject();
                        jsonOjbect.Add("text_angle", JsonValue.CreateNumberValue(ocrResult.TextAngle.HasValue ? ocrResult.TextAngle.Value : 0d));
                        string extractedText = "";

                        JsonArray wordsArray = new JsonArray();
                        jsonOjbect.Add("words", wordsArray);

                        // Iterate over recognized lines of text.
                        foreach (var line in ocrResult.Lines)
                        {
                            hasVerticalLines |= line.IsVertical;
                            JsonArray lineArray = new JsonArray();
                            foreach (var word in line.Words)
                            {
                                JsonObject wordJson = new JsonObject();
                                wordsArray.Add(wordJson);
                                wordJson.Add("top", JsonValue.CreateNumberValue(word.Top));
                                wordJson.Add("left", JsonValue.CreateNumberValue(word.Left));
                                wordJson.Add("width", JsonValue.CreateNumberValue(word.Width));
                                wordJson.Add("height", JsonValue.CreateNumberValue(word.Height));
                                wordJson.Add("text", JsonValue.CreateStringValue(word.Text));

                                extractedText += word.Text + " ";
                            }
                            extractedText += '\n';
                        }
                        jsonOjbect.Add("text", JsonValue.CreateStringValue(extractedText));
                        jsonOjbect.Add("has_vertical_line", JsonValue.CreateBooleanValue(hasVerticalLines));
                        await WriteToFile(folder, file.Name + ".txt", jsonOjbect.Stringify());
                    }
                    else
                    {
                        await WriteToFile(folder, "failed.txt", "No Text");
                    }
                }
                catch (Exception ex)
                {
                    await WriteToFile(folder, "failed.txt", ex.Message + "\r\n" + ex.StackTrace);
                }
                App.Current.Exit();
            }
        }
Beispiel #30
0
        private async void Ocr_OnClick(object sender, RoutedEventArgs e)
        {
            _drawTool?.Close();
            ClearResults();

            Rectangle    itemsRectangle    = _viewModel.ItemsRectangle;
            Rectangle    quantityRectangle = _viewModel.QuantityRectangle;
            BitmapSource bitmapSource      = _viewModel.ImgSource;

            if (bitmapSource == null || itemsRectangle == null || quantityRectangle == null)
            {
                return;
            }

            // Check if OcrEngine supports the image resolution.
            if (bitmapSource.PixelWidth > OcrEngine.MaxImageDimension ||
                bitmapSource.PixelHeight > OcrEngine.MaxImageDimension)
            {
                return;
            }

            OcrEngine ocrEngine = OcrEngine.TryCreateFromUserProfileLanguages();

            if (ocrEngine == null)
            {
                // Display message to user!
                return;
            }

            OcrResult ocrResult = await ocrEngine.RecognizeAsync(_viewModel.Bitmap);

            // Display recognized text.
            //ExtractedTextBox.Text = ocrResult.Text;

            if (ocrResult.TextAngle != null)
            {
                // If text is detected under some angle in this sample scenario we want to
                // overlay word boxes over original image, so we rotate overlay boxes.
                OverlayCanvas.RenderTransform = new RotateTransform
                {
                    Angle   = (double)ocrResult.TextAngle,
                    CenterX = PreviewImage.ActualWidth / 2,
                    CenterY = PreviewImage.ActualHeight / 2
                };
            }

            ScaleTransform scaleTrasform = ScaleTrasform();

            var itemsRect = new Rect(Canvas.GetLeft(itemsRectangle), Canvas.GetTop(itemsRectangle), itemsRectangle.Width,
                                     itemsRectangle.Height);

            var quantityRect = new Rect(Canvas.GetLeft(quantityRectangle), Canvas.GetTop(quantityRectangle), quantityRectangle.Width,
                                        quantityRectangle.Height);


            var items      = new List <OcrWord>();
            var quantities = new List <OcrWord>();

            foreach (OcrLine line in ocrResult.Lines)
            {
                //bool isHorizontal = IsHorizontal(line);

                foreach (OcrWord word in line.Words)
                {
                    Rect rect = scaleTrasform.TransformBounds(word.BoundingRect);

                    Rect intersectRect1 = itemsRect;
                    intersectRect1.Intersect(rect);

                    Rect intersectRect2 = quantityRect;
                    intersectRect2.Intersect(rect);

                    bool itemsWord    = intersectRect1 == rect;
                    bool quantityWord = intersectRect2 == rect;

                    if (!itemsWord && !quantityWord)
                    {
                        continue;
                    }

                    var overlay = new Rectangle
                    {
                        Fill   = new SolidColorBrush(Color.FromArgb(125, 0, 0, 255)),
                        Width  = rect.Width,
                        Height = rect.Height,
                        Tag    = word
                    };
                    Canvas.SetTop(overlay, rect.Top);
                    Canvas.SetLeft(overlay, rect.Left);
                    OverlayCanvas.Children.Add(overlay);

                    if (itemsWord)
                    {
                        items.Add(word);
                    }
                    else
                    {
                        quantities.Add(word);
                    }
                }

                _lines.Add(line);
            }

            _viewModel.Items      = items;
            _viewModel.Quantities = quantities;
        }
        private async void grdScan_Tapped(object sender, TappedRoutedEventArgs e)
        {
            try
            {
                // Prevent another OCR request, since only one image can be processed at a time by the same OCR engine instance.
                grdScan.IsTapEnabled = false;
                btnCapture.IsEnabled = false;
                string extractedText = "";

                //From stream to WriteableBitmap
                wb = await StorageFileToWriteableBitmap(file);

                int fixedSize = (wb.PixelHeight < wb.PixelWidth) ? wb.PixelWidth : wb.PixelHeight;

                // Get the size of the image when it is displayed on the phone.
                double displayedWidth  = imgCapped.ActualWidth;
                double displayedHeight = imgCapped.ActualHeight;

                double fixedDisplay = (displayedHeight < displayedWidth) ? displayedWidth : displayedHeight;

                double ratio = fixedSize / fixedDisplay;

                wb = rectCrop.CroppedImage;

                //PREPROCESSING
                byte[] arrImg = ImageClass.ConvertBitmapToByteGray(wb);
                matrixImage = Converter.ByteArrayToMatrix(arrImg, wb.PixelWidth, 4);
                matrixImage = ImageClass.ConvolutionFilter(matrixImage, ImageClass.maskSharp1, 1);

                //THRESHOLDING
                int otsuT = ImageClass.GetOtsuThreshold(matrixImage);
                matrixImage = ImageClass.OtsuProcessed(matrixImage, otsuT);
                arrImg      = Converter.MatrixToByteArray(matrixImage);

                wb = ImageClass.ConvertByteArrayToBitmap(arrImg, wb.PixelWidth);

                {
                    // Check whether the loaded image is supported for processing.
                    // Supported image dimensions are between 40 and 2600 pixels.
                    if (wb.PixelHeight < 40 ||
                        wb.PixelHeight > 2600 ||
                        wb.PixelWidth < 40 ||
                        wb.PixelWidth > 2600)
                    {
                        MessageDialog dialog = new MessageDialog("Image size is not supported." +
                                                                 Environment.NewLine +
                                                                 "Loaded image size is " + wb.PixelWidth + "x" + wb.PixelHeight + "." +
                                                                 Environment.NewLine +
                                                                 "Supported image dimensions are between 40 and 2600 pixels.");
                        await dialog.ShowAsync();

                        return;
                    }

                    Debug.WriteLine(-1);
                    // This is the main API call to extract text from the image.
                    var ocrResult = await ocrEngine.RecognizeAsync((uint)wb.PixelHeight, (uint)wb.PixelWidth, wb.PixelBuffer.ToArray());

                    // If the OCR result does not contain any lines, no text was recognized.
                    if (ocrResult.Lines != null)
                    {
                        // Used for text overlay.
                        // Prepare scale transform for words since image is not displayed in original format.
                        var scaleTrasform = new ScaleTransform
                        {
                            CenterX = 0,
                            CenterY = 0,
                            ScaleX  = imgCapped.ActualWidth / wb.PixelWidth,
                            ScaleY  = imgCapped.ActualHeight / wb.PixelHeight,
                        };

                        if (ocrResult.TextAngle != null)
                        {
                            imgCapped.RenderTransform = new RotateTransform
                            {
                                Angle   = (double)ocrResult.TextAngle,
                                CenterX = imgCapped.ActualWidth / 2,
                                CenterY = imgCapped.ActualHeight / 2
                            };
                        }

                        Debug.WriteLine(2);
                        // Iterate over recognized lines of text.
                        foreach (var line in ocrResult.Lines)
                        {
                            // Iterate over words in line.
                            foreach (var word in line.Words)
                            {
                                Debug.WriteLine(word.Text);
                                extractedText += word.Text;
                            }
                            break;
                        }
                    }
                    else
                    {
                        extractedText = "";
                    }
                    Frame.Navigate(typeof(CallPage), extractedText);
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
            finally
            {
                imgCapped.Source     = null;
                grdScan.IsTapEnabled = true;
                btnCapture.IsEnabled = true;
                ocrEngine            = new OcrEngine(OcrLanguage.English);
            }
        }
Beispiel #32
0
        public async Task <OcrResult> Run(SoftwareBitmap bmp)
        {
            var result = await engine.RecognizeAsync(bmp);

            return(result);
        }
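
A hypothetical field and call site for the wrapper above; the initialization with TryCreateFromUserProfileLanguages is an assumption, and that method returns null when no installed OCR language pack matches the user's profile languages:

        private readonly OcrEngine engine = OcrEngine.TryCreateFromUserProfileLanguages();

        public async Task PrintTextAsync(SoftwareBitmap bmp)
        {
            OcrResult result = await Run(bmp);
            Debug.WriteLine(result.Text);
        }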
Beispiel #33
0
        async Task doOcrTranslate()
        {
            if (brush == null)
            {
                return;
            }

            NotifyUser("Start OCR process", NotifyType.StatusMessage);
            WriteableBitmap bitmap = croppedBitmap;
            // This is the main API call to extract text from the image.
            OcrResult ocrResult = null;

            try
            {
                if (rbSimple.IsChecked == true)
                {
                    ocrResult = await ocrEngineSimpl.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());
                }
                else
                {
                    ocrResult = await ocrEngineTraditional.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());
                }
            }
            catch (Exception ocrE)
            {
                NotifyUser("OCR error:" + ocrE, NotifyType.ErrorMessage);
            }

            // If the OCR result does not contain any lines, no text was recognized.
            if (ocrResult != null && ocrResult.Lines != null)
            {
                string extractedText = "";

                // Iterate over recognized lines of text.
                foreach (var line in ocrResult.Lines)
                {
                    foreach (var v in line.Words)
                    {
                        extractedText += v.Text;
                    }
                    extractedText += Environment.NewLine;
                }

                NotifyUser("OCR success:", NotifyType.StatusMessage);
                //	txtOCR.Text = extractedText;
                txtbox111.Text = extractedText;
                try
                {
                    txtPin.Text = ChineseToPinYin.Convert3Pin(extractedText);
                }
                catch (Exception pinEx)
                {
                    txtPin.Text = "Error during converting to PinYin";
                }

                NotifyUser("Start translation", NotifyType.StatusMessage);
                if (cbTranslate.IsChecked == true)
                {
                    DoTranslate();
                }
            }
            else
            {
                //	txtOCR.Text = "No text.";
                txtbox111.Text      = "No text.";
                txtPin.Text         = "No text.";
                txtTranslation.Text = "No text.";
                NotifyUser("No text recognized", NotifyType.ErrorMessage);
            }
        }
Beispiel #34
0
        protected async override void OnLaunched(LaunchActivatedEventArgs e)
        {
            var folder = Windows.Storage.ApplicationData.Current.LocalFolder;

            if (!string.IsNullOrEmpty(e.Arguments))
            {
                try
                {
                    await WriteToFile(folder, "arg.txt", e.Arguments);

                    var ocrEngine = new OcrEngine(OcrLanguage.English);
                    var file      = await folder.GetFileAsync(e.Arguments);

                    ImageProperties imgProp = await file.Properties.GetImagePropertiesAsync();

                    WriteableBitmap bitmap;
                    using (var imgStream = await file.OpenAsync(FileAccessMode.Read))
                    {
                        bitmap = new WriteableBitmap((int)imgProp.Width, (int)imgProp.Height);
                        bitmap.SetSource(imgStream);
                    }
                    // Check whether the loaded image is supported for processing.
                    // Supported image dimensions are between 40 and 2600 pixels.
                    if (bitmap.PixelHeight < 40 ||
                        bitmap.PixelHeight > 2600 ||
                        bitmap.PixelWidth < 40 ||
                        bitmap.PixelWidth > 2600)
                    {
                        //write invalid image to output

                        return;
                    }

                    // This is the main API call to extract text from the image.
                    var ocrResult = await ocrEngine.RecognizeAsync((uint)bitmap.PixelHeight, (uint)bitmap.PixelWidth, bitmap.PixelBuffer.ToArray());

                    // If the OCR result does not contain any lines, no text was recognized.
                    if (ocrResult.Lines != null)
                    {
                        string extractedText = "";

                        // Iterate over recognized lines of text.
                        foreach (var line in ocrResult.Lines)
                        {
                            // Iterate over words in line.
                            foreach (var word in line.Words)
                            {
                                extractedText += word.Text + " ";
                            }
                            extractedText += Environment.NewLine;
                        }
                        await WriteToFile(folder, file.Name + ".txt", extractedText);
                    }
                    else
                    {
                        WriteToFile(folder, "failed.txt", "No Text");
                    }
                }
                catch (Exception ex)
                {
                    WriteToFile(folder, "failed.txt", ex.Message + "\r\n" + ex.StackTrace);
                }
                App.Current.Exit();
            }
        }
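The OnLaunched example above calls a WriteToFile helper that is not shown. A minimal sketch, assuming it simply creates or overwrites a text file in the given folder and that the usual Windows.Storage and System.Threading.Tasks usings are in place:

        private static async Task WriteToFile(StorageFolder folder, string fileName, string content)
        {
            // Create (or replace) the file and write the text into it.
            var file = await folder.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);
            await FileIO.WriteTextAsync(file, content);
        }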
Beispiel #35
0
        private async void RecognizeOCR()
        {
            ClearResults();

            if (originalBitmap == null)
            {
                NotifyUser("Please open image file first!", NotifyType.ErrorMessage);
                return;
            }

            // Check if OcrEngine supports the image resolution.
            if (originalBitmap.PixelWidth > OcrEngine.MaxImageDimension || originalBitmap.PixelHeight > OcrEngine.MaxImageDimension)
            {
                this.NotifyUser(
                    String.Format("Bitmap dimensions ({0}x{1}) are too big for OCR.", originalBitmap.PixelWidth, originalBitmap.PixelHeight) +
                    "Max image dimension is " + OcrEngine.MaxImageDimension + ".",
                    NotifyType.ErrorMessage);

                return;
            }

            OcrEngine ocrEngine = null;

            if (UserLanguageToggle.IsOn)
            {
                // Try to create OcrEngine for first supported language from UserProfile.GlobalizationPreferences.Languages list.
                // If none of the languages are available on device, method returns null.
                ocrEngine = OcrEngine.TryCreateFromUserProfileLanguages();
            }
            else
            {
                // Try to create OcrEngine for specified language.
                // If language is not supported on device, method returns null.
                ocrEngine = OcrEngine.TryCreateFromLanguage(LanguageList.SelectedValue as Language);
            }

            if (ocrEngine != null)
            {
                // Recognize text from image.
                var ocrResult = await ocrEngine.RecognizeAsync(originalBitmap);

                // Display recognized text.
                ExtractedTextBox.Text = ocrResult.Text;

                if (ocrResult.TextAngle != null)
                {
                    // If text is detected at an angle, rotate the overlay boxes
                    // so that they line up with the words in the original image.
                    TextOverlay.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PhotoCanvas.ActualWidth / 2,
                        CenterY = PhotoCanvas.ActualHeight / 2
                    };
                }

                // Create overlay boxes over recognized words.
                foreach (var line in ocrResult.Lines)
                {
                    Rect lineRect = Rect.Empty;
                    foreach (var word in line.Words)
                    {
                        lineRect.Union(word.BoundingRect);
                    }

                    // Determine if line is horizontal or vertical.
                    // Vertical lines are supported only in Chinese Traditional and Japanese languages.
                    bool isVerticalLine = lineRect.Height > lineRect.Width;

                    foreach (var word in line.Words)
                    {
                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                        // Keep references to word boxes.
                        wordBoxes.Add(wordBoxOverlay);

                        // Define overlay style.
                        var overlay = new Border()
                        {
                            Style = isVerticalLine ?
                                    (Style)Resources["HighlightedWordBoxVerticalLine"] :
                                    (Style)Resources["HighlightedWordBoxHorizontalLine"]
                        };

                        // Bind word boxes to UI.
                        overlay.SetBinding(MarginProperty, wordBoxOverlay.CreateWordPositionBinding());
                        overlay.SetBinding(WidthProperty, wordBoxOverlay.CreateWordWidthBinding());
                        overlay.SetBinding(HeightProperty, wordBoxOverlay.CreateWordHeightBinding());

                        // Add the styled overlay box to the text overlay grid.
                        TextOverlay.Children.Add(overlay);
                    }
                }

                // Rescale word boxes to match current UI size.
                UpdateWordBoxTransform();

                this.NotifyUser(
                    "Found " + ocrResult.Lines.Count + " lines of text, and " + wordBoxes.Count + " words.",
                    NotifyType.StatusMessage);

                Speak(ocrResult.Text);
            }
            else
            {
                this.NotifyUser("Selected language is not available.", NotifyType.ErrorMessage);
            }
        }
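Both TryCreateFromUserProfileLanguages and TryCreateFromLanguage return null when no matching OCR language pack is installed. A minimal sketch, not part of the sample, showing how the installed OCR languages could be inspected up front via the static OcrEngine.AvailableRecognizerLanguages and OcrEngine.IsLanguageSupported members:

        private static bool IsOcrLanguageAvailable(string bcp47Tag)
        {
            // List every OCR language installed on the device.
            foreach (Language language in OcrEngine.AvailableRecognizerLanguages)
            {
                System.Diagnostics.Debug.WriteLine(language.LanguageTag + " - " + language.DisplayName);
            }

            // Check whether the requested language can be used for OCR.
            return OcrEngine.IsLanguageSupported(new Language(bcp47Tag));
        }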
        /// <summary>
        /// This is event handler for 'Extract' button.
        /// Captures an image from the camera, recognizes the text, and displays it.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private async void ExtractButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
        {
            // Get information about the preview.
            var previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            int videoFrameWidth   = (int)previewProperties.Width;
            int videoFrameHeight  = (int)previewProperties.Height;

            // In portrait modes, the width and height must be swapped for the VideoFrame to have the correct aspect ratio and avoid letterboxing / black bars.
            if (!externalCamera && (displayInformation.CurrentOrientation == DisplayOrientations.Portrait || displayInformation.CurrentOrientation == DisplayOrientations.PortraitFlipped))
            {
                videoFrameWidth  = (int)previewProperties.Height;
                videoFrameHeight = (int)previewProperties.Width;
            }

            // Create the video frame to request a SoftwareBitmap preview frame.
            var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, videoFrameWidth, videoFrameHeight);

            // Capture the preview frame.
            using (var currentFrame = await mediaCapture.GetPreviewFrameAsync(videoFrame))
            {
                // Collect the resulting frame.
                SoftwareBitmap bitmap = currentFrame.SoftwareBitmap;

                OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(ocrLanguage);

                if (ocrEngine == null)
                {
                    rootPage.NotifyUser(ocrLanguage.DisplayName + " is not supported.", NotifyType.ErrorMessage);

                    return;
                }

                var imgSource = new WriteableBitmap(bitmap.PixelWidth, bitmap.PixelHeight);
                bitmap.CopyToBuffer(imgSource.PixelBuffer);
                PreviewImage.Source = imgSource;

                var ocrResult = await ocrEngine.RecognizeAsync(bitmap);

                // Used for the text overlay.
                // Prepare a scale transform for the words, since the image is not displayed at its original size.
                var scaleTransform = new ScaleTransform
                {
                    CenterX = 0,
                    CenterY = 0,
                    ScaleX  = PreviewControl.ActualWidth / bitmap.PixelWidth,
                    ScaleY  = PreviewControl.ActualHeight / bitmap.PixelHeight
                };

                if (ocrResult.TextAngle != null)
                {
                    // If text is detected at an angle, rotate the overlay boxes
                    // so that they line up with the words in the original image.
                    TextOverlay.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                // Iterate over recognized lines of text.
                foreach (var line in ocrResult.Lines)
                {
                    // Iterate over words in line.
                    foreach (var word in line.Words)
                    {
                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                        // Keep references to word boxes.
                        wordBoxes.Add(wordBoxOverlay);

                        // Create a box with the word inside it.
                        var textBlock = new TextBlock()
                        {
                            Text  = word.Text,
                            Style = ExtractedWordTextStyle
                        };
                        TextOverlay.Children.Add(wordBoxOverlay.CreateBorder(HighlightedWordBoxHorizontalLineStyle, textBlock));
                    }
                }

                rootPage.NotifyUser("Image processed using " + ocrEngine.RecognizerLanguage.DisplayName + " language.", NotifyType.StatusMessage);
            }

            UpdateWordBoxTransform();

            PreviewControl.Visibility = Visibility.Collapsed;
            Image.Visibility          = Visibility.Visible;
            ExtractButton.Visibility  = Visibility.Collapsed;
            CameraButton.Visibility   = Visibility.Visible;
        }
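The scale transform prepared in ExtractButton_Click is typically applied to each word's BoundingRect so the overlay lines up with the displayed image. A minimal sketch of that mapping; the ScaleWordBox helper is an assumption, not part of the sample:

        private static Rect ScaleWordBox(OcrWord word, ScaleTransform scaleTransform)
        {
            // TransformBounds maps the bounding box from bitmap coordinates
            // into display coordinates using the given scale.
            return scaleTransform.TransformBounds(word.BoundingRect);
        }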