public static void Run()
{
    // ExStart:ExtractPreprocessedImages
    // Folder that holds the sample documents.
    string dataDir = RunExamples.GetDataDir_OCR();

    // Create the OCR engine and point it at the source image
    // (loaded from a file path; a MemoryStream would also work).
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "sample1.jpg");

    // Ask the engine to keep the intermediate (preprocessed) images around.
    ocrEngine.Config.SavePreprocessedImages = true;

    if (ocrEngine.Process())
    {
        // Persist each preprocessing stage to disk as PNG: binarized, filtered,
        // non-text removed, skew-corrected and text-block detection results.
        ocrEngine.PreprocessedImages.BinarizedImage.Save(dataDir + "BinarizedImage_out.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.FilteredImage.Save(dataDir + "FilteredImage_out.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.NonTextRemovedImage.Save(dataDir + "NonTextRemovedImage_out.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.RotatedImage.Save(dataDir + "RotatedImage_out.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.TextBlocksImage.Save(dataDir + "TextBlocksImage_out.png", System.Drawing.Imaging.ImageFormat.Png);
    }
    // ExEnd:ExtractPreprocessedImages
}
internal static ImageStream GetVideoImage(MediaFileVideo mediaAssetVideo, MediaFileGetImageOptions options)
{
    // Requested target dimensions.
    var targetWidth = options.Width;
    var targetHeight = options.Height;

    Bitmap thumbnail;
    bool wantsSquareMicro = targetWidth == targetHeight
        && targetWidth > 0
        && targetWidth <= 96
        && targetHeight <= 96
        && options.ResizeAspect == MediaFileGetImageOptions.ImageResizeAspect.AspectFill;

    if (wantsSquareMicro)
    {
        // Small square fill request: the platform micro thumbnail is sufficient.
        thumbnail = MediaStore.Video.Thumbnails.GetThumbnail(MediaFileManager.Context.ContentResolver, mediaAssetVideo.Id, VideoThumbnailKind.MicroKind, null);
    }
    else if ((targetWidth <= 512 && targetHeight <= 384) || targetWidth <= 0 || targetHeight <= 0)
    {
        // Fits the mini-thumbnail bounds, or no explicit size was given.
        thumbnail = MediaStore.Video.Thumbnails.GetThumbnail(MediaFileManager.Context.ContentResolver, mediaAssetVideo.Id, VideoThumbnailKind.MiniKind, null);
    }
    else
    {
        // Larger sizes: extract a full-screen frame from the video itself.
        thumbnail = Android.Media.ThumbnailUtils.CreateVideoThumbnail(mediaAssetVideo.Uri, ThumbnailKind.FullScreenKind);
    }

    // Apply resize/orientation options, then JPEG-encode the result into a stream.
    using (var processed = ApplyImageOptions(thumbnail, MediaFileImageOrientation.Up, options, targetWidth, targetHeight))
    using (var jpegStream = new MemoryStream())
    {
        processed.Compress(Bitmap.CompressFormat.Jpeg, options.Quality, jpegStream);
        jpegStream.Position = 0;
        return ImageStream.FromStream(jpegStream, processed.Width, processed.Height);
    }
}
internal static ImageStream GetImage(MediaFileImage mediaAssetImage, MediaFileGetImageOptions options)
{
    // Requested output dimensions.
    var targetWidth = options.Width;
    var targetHeight = options.Height;

    Bitmap source;
    bool smallSquareCrop = targetWidth == targetHeight
        && targetWidth > 0
        && targetWidth <= 96
        && targetHeight <= 96
        && options.ResizeAspect == MediaFileGetImageOptions.ImageResizeAspect.AspectFill;

    if (smallSquareCrop)
    {
        // A micro thumbnail already satisfies a small square fill request.
        source = MediaStore.Images.Thumbnails.GetThumbnail(MediaFileManager.Context.ContentResolver, mediaAssetImage.Id, ThumbnailKind.MicroKind, null);
    }
    else if (targetWidth > 0 && targetWidth <= 512 && targetHeight > 0 && targetHeight <= 384)
    {
        // Fits inside the mini-thumbnail bounds.
        source = MediaStore.Images.Thumbnails.GetThumbnail(MediaFileManager.Context.ContentResolver, mediaAssetImage.Id, ThumbnailKind.MiniKind, null);
    }
    else
    {
        // Anything bigger (or an unspecified size) decodes the original file.
        source = BitmapFactory.DecodeFile(mediaAssetImage.Uri);
    }

    // Apply resize/orientation options, then JPEG-encode the result into a stream.
    using (var processed = ApplyImageOptions(source, mediaAssetImage.Orientation, options, targetWidth, targetHeight))
    using (var jpegStream = new MemoryStream())
    {
        processed.Compress(Bitmap.CompressFormat.Jpeg, options.Quality, jpegStream);
        jpegStream.Position = 0;
        return ImageStream.FromStream(jpegStream, processed.Width, processed.Height);
    }
}
public static void Run()
{
    // ExStart:ApplyingCorrectionFilters
    // Folder containing the sample documents.
    string dataDir = RunExamples.GetDataDir_OCR();

    // Set up the OCR engine with the source image.
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sampleocr.bmp");

    // Build a correction-filter chain: median smoothing followed by Gaussian blur.
    CorrectionFilters filters = new CorrectionFilters();
    filters.Add(new MedianFilter(5));
    filters.Add(new GaussBlurFilter());

    // Attach the filter chain to the engine configuration.
    ocrEngine.Config.CorrectionFilters = filters;

    // Recognize and print the result.
    if (ocrEngine.Process())
    {
        Console.WriteLine(ocrEngine.Text);
    }
    // ExEnd:ApplyingCorrectionFilters
}
public ImageStream GetChannelImage(string channelId, CancellationToken cancellationToken)
{
    // Fetches the channel's icon from the TVHeadend HTTP server.
    // NOTE(review): cancellationToken is currently unused - confirm whether
    // cancellation was intended to be honored here.

    // Resolve the icon path registered for this channel id.
    String channelIcon = _channelDataHelper.GetChannelIcon4ChannelId(channelId);

    // Build the request with HTTP basic authentication.
    WebRequest request = WebRequest.Create("http://" + _tvhServerName + ":" + _httpPort + "/" + channelIcon);
    string authInfo = _userName + ":" + _password;
    authInfo = Convert.ToBase64String(Encoding.Default.GetBytes(authInfo));
    request.Headers["Authorization"] = "Basic " + authInfo;

    ImageStream imageStream = new ImageStream();
    try
    {
        HttpWebResponse httpWebReponse = (HttpWebResponse)request.GetResponse();
        // NOTE(review): the response object is never disposed; presumably the caller
        // owns the returned stream's lifetime - verify.
        imageStream.Stream = httpWebReponse.GetResponseStream();
        // NOTE(review): format is hard-coded to PNG even though the server may serve
        // another image type - confirm TVHeadend always returns PNG icons.
        imageStream.Format = MediaBrowser.Model.Drawing.ImageFormat.Png;
    }
    catch (Exception ex)
    {
        // Best-effort: log and fall through, returning an ImageStream whose
        // Stream property is still null.
        _logger.Error("[TVHclient] HTSConnectionHandler.GetChannelImage() caught exception: " + ex.Message);
    }

    return(imageStream);
}
public static void Run()
{
    // ExStart:UsingNotifierFactory
    // Folder containing the sample documents.
    string dataDir = RunExamples.GetDataDir_OCR();

    // Create the OCR engine and load the source image.
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "answers.jpg");

    // Create a block notifier whose Elapsed event prints each recognized block.
    INotifier processorBlock = NotifierFactory.BlockNotifier();
    processorBlock.Elapsed += delegate
    {
        Console.WriteLine(processorBlock.Text);
    };

    // Register the notifier and run recognition.
    ocrEngine.AddNotifier(processorBlock);
    ocrEngine.Process();
    // ExEnd:UsingNotifierFactory
}
public override void ExecuteResult(ActionContext context)
{
    // Validate before doing any work: the original called base.ExecuteResult(context)
    // BEFORE the null check, so a null context would already have been dereferenced
    // and the ArgumentNullException could never fire.
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    base.ExecuteResult(context);

    var response = context.HttpContext.Response;
    response.StatusCode = (int)HttpStatusCode.OK;
    response.ContentType = ContentType;
    response.ContentLength = ImageStream.Length;

    // Copy the image to the response body in 4 KB chunks. Read returns the number
    // of bytes read (0 at end of stream) - the original comment incorrectly called
    // it the "last byte".
    var buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = ImageStream.Read(buffer, 0, buffer.Length)) != 0)
    {
        response.Body.Write(buffer, 0, bytesRead);
    }
}
public static void Run()
{
    // ExStart:ExtractingText
    // Folder containing the sample documents.
    string dataDir = RunExamples.GetDataDir_OCR();

    OcrEngine ocrEngine = new OcrEngine();

    // Start from a clean slate: no notifiers and no previous recognition blocks.
    ocrEngine.ClearNotifies();
    ocrEngine.Config.ClearRecognitionBlocks();

    // Register two user-defined regions of interest.
    ocrEngine.Config.AddRecognitionBlock(RecognitionBlock.CreateTextBlock(27, 63, 34, 38)); // Detecting A
    ocrEngine.Config.AddRecognitionBlock(RecognitionBlock.CreateTextBlock(209, 111, 28, 34)); // Detecting 6

    // Restrict recognition to the blocks above instead of auto-detecting regions.
    ocrEngine.Config.DetectTextRegions = false;

    // Load the image and recognize.
    ocrEngine.Image = ImageStream.FromFile(dataDir + "sampleocr.bmp");
    if (ocrEngine.Process())
    {
        Console.WriteLine(ocrEngine.Text);
    }
    // ExEnd:ExtractingText
}
public static void Run()
{
    // ExStart:ReadPartInformation
    // The path to the documents directory.
    string dataDir = RunExamples.GetDataDir_OMR();

    // Initialize an instance of OcrEngine
    OcrEngine ocrEngine = new OcrEngine();

    // Set Image property by loading an image from file path
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sampleocr.bmp");

    // Run recognition process
    if (ocrEngine.Process())
    {
        Console.WriteLine(ocrEngine.Text);

        // Retrieve an array of recognized text by parts
        IRecognizedPartInfo[] text = ocrEngine.Text.PartsInfo;

        // Iterate over the text parts
        foreach (IRecognizedTextPartInfo symbol in text)
        {
            // Display part information: the text, its italic flag and detected language.
            Console.WriteLine("Text : " + symbol.Text);
            Console.WriteLine("isItalic : " + symbol.Italic);
            Console.WriteLine("Language : " + symbol.Language);
        }
    }
    // ExEnd:ReadPartInformation  (was mistakenly "ExStart", which breaks snippet extraction)
}
public static void Run()
{
    // Folder containing the sample documents.
    string dataDir = RunExamples.GetDataDir_OCR();

    // Set up the OCR engine with the source image (disk, memory or URL all work).
    OcrEngine engine = new OcrEngine();
    engine.Image = ImageStream.FromFile(dataDir + "Sample.bmp");

    // Define a recognition region (x, y, width, height) limited to digits only.
    IRecognitionBlock block = RecognitionBlock.CreateTextBlock(6, 9, 120, 129);
    block.Whitelist = new char[] { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' };
    // YOU CAN ADD MORE TEXT BLOCK AND SET WHITE LISTS.

    // Use only the explicit block above; skip automatic text-region detection.
    engine.Config.ClearRecognitionBlocks();
    engine.Config.AddRecognitionBlock(block);
    engine.Config.DetectTextRegions = false;

    // Recognize and print the result.
    if (engine.Process())
    {
        Console.WriteLine(engine.Text);
    }
}
public static async System.Threading.Tasks.Task Run([BlobTrigger("kyccontainer/{name}", Connection = "AzureWebJobsStorage")] Stream blob, string name, TraceWriter log)
{
    // Blob-triggered KYC function: OCRs a passport image, verifies it, and saves
    // both results. Returns Task instead of the original "async void" so the
    // Functions host can await completion and observe exceptions (async void
    // exceptions are unobservable and can crash the process).
    try
    {
        log.Info($"Azure blob function triggered for the passport image: {name}");

        // Resolve assembly
        FunctionsAssemblyResolver.StaticInstance();

        // Using AbbyyCloudOCR to extract Passport details
        RestServiceClient restClient = new RestServiceClient();

        // Convert the incoming blob into an image stream usable by the OCR client.
        MemoryStream streamOut = ImageStream.GetImageStream(blob);
        OcrSdkTask task = restClient.ProcessMrz(streamOut);

        // Wait for and fetch the recognized passport (MRZ) details.
        AbbyyCloudOCRResponse abbyyCloudOCRResponse = restClient.WaitAndGetAbbyyCloudOCRResponse(task);

        // Verify the passport via the Trulioo API.
        Transaction transaction = await Verify.VerifyPassport(abbyyCloudOCRResponse, streamOut);

        // Persist both the OCR response and the verification transaction.
        DataLayer.KYCDbContext.Save(abbyyCloudOCRResponse, transaction);
    }
    catch (Exception ex)
    {
        log.Error(ex.Message, ex, "KYC blob function");
    }
}
public override void ExecuteResult(ControllerContext context)
{
    // Either raw bytes or a stream must be supplied, plus a MIME type.
    if (this.ImageData == null && ImageStream == null)
    {
        throw new ArgumentNullException("ImageData or ImageStream");
    }

    if (string.IsNullOrEmpty(this.MimeType))
    {
        throw new ArgumentNullException("MimeType");
    }

    var response = context.HttpContext.Response;
    response.ContentType = this.MimeType;

    // Optional HTTP caching headers.
    if (!string.IsNullOrEmpty(this.ETag))
    {
        response.Cache.SetETag(this.ETag);
    }

    if (this.Expires.HasValue)
    {
        response.Cache.SetCacheability(this.Cacheability);
        response.Cache.SetExpires(this.Expires.Value);
    }

    // When a stream was supplied, materialize it into the byte array first.
    if (ImageStream != null)
    {
        ImageData = ImageStream.ToArray();
    }

    // Write the image bytes and flush them to the client.
    response.BinaryWrite(this.ImageData);
    response.Flush();
}
public ImageStream GetChannelImage(string channelId, CancellationToken cancellationToken)
{
    // Fetches the channel's icon from the TVHeadend HTTP server and re-encodes it
    // as PNG. Returns null on any failure.
    // NOTE(review): cancellationToken is currently unused - confirm intent.
    try
    {
        _logger.Info("[TVHclient] HTSConnectionHandler.GetChannelImage() channelId: " + channelId);

        // Resolve the icon path registered for this channel id.
        String channelIcon = _channelDataHelper.GetChannelIcon4ChannelId(channelId);
        _logger.Info("[TVHclient] HTSConnectionHandler.GetChannelImage() channelIcon: " + channelIcon);

        // Download the icon, reusing the cached Authorization header.
        WebRequest request = WebRequest.Create("http://" + _tvhServerName + ":" + _httpPort + "/" + channelIcon);
        _logger.Info("[TVHclient] HTSConnectionHandler.GetChannelImage() WebRequest: " + "http://" + _tvhServerName + ":" + _httpPort + "/" + channelIcon);
        request.Headers["Authorization"] = _headers["Authorization"];

        ImageStream imageStream = new ImageStream();
        HttpWebResponse httpWebReponse = (HttpWebResponse)request.GetResponse();
        Stream stream = httpWebReponse.GetResponseStream();

        // Re-encode whatever the server returned as PNG so the Format set below
        // is accurate regardless of the source image type.
        // NOTE(review): response/stream/image are never disposed - verify lifetimes.
        Image image = Image.FromStream(stream);
        imageStream.Stream = ImageToPNGStream(image);
        imageStream.Format = MediaBrowser.Model.Drawing.ImageFormat.Png;
        return(imageStream);
    }
    catch (Exception ex)
    {
        // Best-effort: log the failure and return null (callers must handle null).
        _logger.Error("[TVHclient] HTSConnectionHandler.GetChannelImage() caught exception: " + ex.Message);
        return(null);
    }
}
public static void Main()
{
    // Folder containing the sample documents.
    string dataDir = Path.GetFullPath("../../../Data/");

    // Set up the OCR engine with the source image.
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sampleocr.bmp");

    // Bail out silently when recognition fails.
    if (!ocrEngine.Process())
    {
        return;
    }

    Console.WriteLine(ocrEngine.Text);

    // Walk the recognized parts and report each symbol with its bounding box.
    IRecognizedPartInfo[] parts = ocrEngine.Text.PartsInfo;
    foreach (IRecognizedTextPartInfo symbol in parts)
    {
        Console.WriteLine("Symbol:" + symbol.Text);

        // Bounding rectangle of the symbol on the image canvas.
        Rectangle box = symbol.Box;
        Console.WriteLine("Box Location:" + box.Location);
        Console.WriteLine("Box Size:" + box.Size);
    }
}
public void Apply()
{
    try
    {
        // Translate the UI selections (strings) into typed option values.
        var orientation = (MediaFileImageOrientation)Enum.Parse(typeof(MediaFileImageOrientation), Orientation, true);
        var aspect = (MediaFileGetImageOptions.ImageResizeAspect)Enum.Parse(typeof(MediaFileGetImageOptions.ImageResizeAspect), ResizeMode, true);

        var options = new MediaFileGetImageOptions
        {
            Width = Width,
            Height = Height,
            Quality = Quality,
            Orientation = orientation,
            ResizeAspect = aspect,
        };

        // Release any previously fetched image before requesting a new one.
        _img?.Dispose();
        _img = _asset.GetImage(options);

        // Summarize dimensions and size, then bind the stream to the preview.
        Details = $"{_img.Width} x {_img.Height} - {Math.Round(_img.Length / 1024d, 2)}KB";
        Image = ImageSource.FromStream(() => _img);
    }
    catch
    {
        // Any failure clears the preview entirely.
        Image = null;
        Details = string.Empty;
    }
}
public void StretchImage_fitSize()
{
    // Verifies that the -fitSize image directive resizes the shape that hosts
    // the inserted image.
    Document doc = DocumentHelper.CreateTemplateDocumentForReportingEngine("<<image [src.Image] -fitSize>>");

    ImageStream imageStream = new ImageStream(new FileStream(_image, FileMode.Open, FileAccess.Read));
    BuildReport(doc, imageStream, "src");

    // Round-trip the document through a stream so the saved result is inspected.
    // "using" guarantees disposal even when an assertion below fails (the original
    // only disposed on the success path).
    using (MemoryStream dstStream = new MemoryStream())
    {
        doc.Save(dstStream, SaveFormat.Docx);
        doc = new Document(dstStream);

        NodeCollection shapes = doc.GetChildNodes(NodeType.Shape, true);
        foreach (Shape shape in shapes)
        {
            // Assert that the image was really inserted into the textbox.
            Assert.IsTrue(shape.ImageData.HasImage);

            // Assert that -fitSize changed both the height and the width.
            Assert.AreNotEqual(346.35, shape.Height);
            Assert.AreNotEqual(431.5, shape.Width);
        }
    }
}
public override void Stop()
{
    // Already shut down: nothing to do.
    if (isStopped)
    {
        return;
    }

    Console.WriteLine("Stopped NetCam Server...");
    CloseCamera();
    isStopped = true;

    // Shut down each networking resource that is still alive.
    if (tcpListener != null)
    {
        tcpListener.Stop();
    }
    if (imageStream != null)
    {
        imageStream.Stop();
    }
    if (currentCamClient != null)
    {
        currentCamClient.Close();
    }
    if (camNS != null)
    {
        camNS.Close();
    }

    // Drop the references so a later restart begins from a clean state.
    tcpListener = null;
    imageStream = null;
    currentCamClient = null;
    camNS = null;
}
public static void Run()
{
    // ExStart:ExtractPreprocessedImage
    // Folder containing the sample documents.
    string dataDir = RunExamples.GetDataDir_OCR();

    // Configure the engine to retain intermediate preprocessing images,
    // then load the source image from a file path (a Stream would also work).
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Config.SavePreprocessedImages = true;
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sample.jpg");

    if (ocrEngine.Process())
    {
        // Persist every preprocessing stage to the output folder as PNG:
        // binarized, filtered, non-text removed, skew-corrected and text blocks.
        ocrEngine.PreprocessedImages.BinarizedImage.Save(dataDir + "Output\\BinarizedImage.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.FilteredImage.Save(dataDir + "Output\\FilteredImage.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.NonTextRemovedImage.Save(dataDir + "Output\\NonTextRemovedImage.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.RotatedImage.Save(dataDir + "Output\\RotatedImage.png", System.Drawing.Imaging.ImageFormat.Png);
        ocrEngine.PreprocessedImages.TextBlocksImage.Save(dataDir + "Output\\TextBlocksImage.png", System.Drawing.Imaging.ImageFormat.Png);
    }

    // Print whatever text was recognized.
    Console.WriteLine(ocrEngine.Text);
    // ExEnd:ExtractPreprocessedImage
}
public static void Run()
{
    // ExStart:DetectingTextBlocks
    // Folder containing the sample documents.
    string dataDir = RunExamples.GetDataDir_OMR();

    // Configure the engine to drop non-textual fragments before recognition.
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sampleocr.bmp");
    ocrEngine.Config.RemoveNonText = true;

    // Recognize and print the bounding box of every detected text block.
    if (ocrEngine.Process())
    {
        foreach (IRecognizedPartInfo part in ocrEngine.Text.PartsInfo)
        {
            Console.WriteLine(part.Box);
        }
    }
    // ExEnd:DetectingTextBlocks
}
public void StretchImagefitSize()
{
    // Verifies that the -fitSize image directive resizes the textbox shape that
    // hosts the inserted image.
    Document doc = DocumentHelper.CreateTemplateDocumentWithDrawObjects("<<image [src.Image] -fitSize>>", ShapeType.TextBox);

    ImageStream imageStream = new ImageStream(new FileStream(this._image, FileMode.Open, FileAccess.Read));
    BuildReport(doc, imageStream, "src", ReportBuildOptions.None);

    // Round-trip the document through a stream so the saved result is inspected.
    // "using" guarantees disposal even when an assertion below fails (the original
    // only disposed on the success path).
    using (MemoryStream dstStream = new MemoryStream())
    {
        doc.Save(dstStream, SaveFormat.Docx);
        doc = new Document(dstStream);

        NodeCollection shapes = doc.GetChildNodes(NodeType.Shape, true);
        foreach (Shape shape in shapes)
        {
            // Assert that the image was really inserted into the textbox.
            Assert.IsTrue(shape.ImageData.HasImage);

            // Assert that -fitSize changed both the height and the width.
            Assert.AreNotEqual(346.35, shape.Height);
            Assert.AreNotEqual(431.5, shape.Width);
        }
    }
}
public static bool FindTextInImageFile(string fileFullPath, string text, ref CancellationTokenSource cts)
{
    // Returns true when OCR finds the given text inside the image file;
    // false on any failure or when the text is absent.
    // NOTE(review): the cts parameter is never used - confirm whether cancellation
    // support was intended here.
    try
    {
        // Create an instance of OcrEngine class
        var ocr = new OcrEngine();

        // Set the Image property of OcrEngine by reading an image file
        ocr.Image = ImageStream.FromFile(fileFullPath);

        // Enable automatic detection of text regions on the image
        // (the original comment incorrectly referred to RemoveNonText)
        ocr.Config.DetectTextRegions = true;

        // Perform OCR operation
        if (ocr.Process())
        {
            if (ocr.Text.ToString().Contains(text))
            {
                return(true);
            }
        }
    }
    catch (Exception)
    {
        // Best-effort: any OCR/IO failure is reported as "not found".
        return(false);
    }

    return(false);
}
public bool Write(string filePath)
{
    try
    {
        // Nothing to write without both an image stream and a destination path.
        if (ImageStream == null || string.IsNullOrEmpty(filePath))
        {
            return false;
        }

        return ImageStream.WriteToFile(filePath);
    }
    catch (Exception e)
    {
        DebugHelper.WriteException(e);

        // Build a user-facing error message; access-denied style failures get an
        // extra hint about antivirus / controlled folder access.
        string message = $"{Resources.ImageData_Write_Error_Message}\r\n\"{filePath}\"";

        if (e is UnauthorizedAccessException || e is FileNotFoundException)
        {
            message += "\r\n\r\n" + Resources.YourAntiVirusSoftwareOrTheControlledFolderAccessFeatureInWindowsCouldBeBlockingShareX;
        }

        MessageBox.Show(message, "ShareX - " + Resources.Error, MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    return false;
}
public override async Task ExecuteResultAsync(ActionContext context)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    try
    {
        var response = context.HttpContext.Response;
        response.ContentType = ContentType;

        // Stream the image to the response in 4 KB chunks.
        var buffer = new byte[4096];
        while (true)
        {
            var read = ImageStream.Read(buffer, 0, buffer.Length);
            if (read == 0)
            {
                break;
            }

            // BUG FIX: only write the bytes actually read. The original wrote the
            // entire buffer every iteration, emitting stale/garbage trailing bytes
            // whenever the final (or any) read returned fewer than 4096 bytes.
            await response.BodyWriter.WriteAsync(buffer.AsMemory(0, read));
        }

        await response.CompleteAsync();
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
    }
}
internal XLPicture(IXLWorksheet worksheet, Stream stream)
    : this(worksheet)
{
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    // Copy the caller's stream into a private, rewindable buffer.
    ImageStream = new MemoryStream();
    stream.Position = 0;
    stream.CopyTo(ImageStream);
    ImageStream.Seek(0, SeekOrigin.Begin);

    using (var bitmap = new Bitmap(ImageStream))
    {
        // Detect the picture format from the bitmap's raw format, when it is
        // one of the formats we know about.
        var rawGuid = bitmap.RawFormat.Guid;
        if (FormatMap.Values.Select(f => f.Guid).Contains(rawGuid))
        {
            Format = FormatMap.Single(f => f.Value.Guid.Equals(rawGuid)).Key;
        }

        DeduceDimensionsFromBitmap(bitmap);
    }

    // Leave the buffer rewound for subsequent readers.
    ImageStream.Seek(0, SeekOrigin.Begin);
}
internal XLPicture(IXLWorksheet worksheet, Stream stream, XLPictureFormat format)
    : this(worksheet)
{
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    Format = format;

    // Copy the caller's stream into a private, rewindable buffer.
    ImageStream = new MemoryStream();
    stream.Position = 0;
    stream.CopyTo(ImageStream);
    ImageStream.Seek(0, SeekOrigin.Begin);

    using (var bitmap = new Bitmap(ImageStream))
    {
        // When the declared format is known, ensure the stream's actual
        // encoding agrees with it.
        if (FormatMap.ContainsKey(Format) && FormatMap[Format].Guid != bitmap.RawFormat.Guid)
        {
            throw new ArgumentException("The picture format in the stream and the parameter don't match");
        }

        DeduceDimensionsFromBitmap(bitmap);
    }

    // Leave the buffer rewound for subsequent readers.
    ImageStream.Seek(0, SeekOrigin.Begin);
}
public static void Main()
{
    // Folder containing the sample documents.
    string dataDir = Aspose.OCR.Examples.Utils.GetDataDir(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

    // Set up the OCR engine with the source image
    // (loaded from a file path; a MemoryStream would also work).
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sampleocr.bmp");

    // Create a word notifier that prints each recognized word as it is produced.
    INotifier processorWord = NotifierFactory.WordNotifier();
    processorWord.Elapsed += delegate
    {
        Console.WriteLine(processorWord.Text);
    };

    // Register the notifier and run recognition.
    ocrEngine.AddNotifier(processorWord);
    ocrEngine.Process();
}
public static void Main()
{
    // Folder containing the sample documents.
    string dataDir = Path.GetFullPath("../../../Data/");

    // Set up the OCR engine with the source image.
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sampleocr.bmp");

    // Bail out silently when recognition fails.
    if (!ocrEngine.Process())
    {
        return;
    }

    Console.WriteLine(ocrEngine.Text);

    // Walk the recognized parts and report the styling metadata of each one.
    IRecognizedPartInfo[] parts = ocrEngine.Text.PartsInfo;
    foreach (IRecognizedTextPartInfo symbol in parts)
    {
        Console.WriteLine("Text : " + symbol.Text);
        Console.WriteLine("isItalic : " + symbol.Italic);
        Console.WriteLine("isUnderline : " + symbol.Underline);
        Console.WriteLine("isBold : " + symbol.Bold);
        Console.WriteLine("FontSize : " + symbol.FontSize);
        Console.WriteLine("Language : " + symbol.Language);
    }
}
void _updateSourceStream(ImageStream newStream)
{
    // Swap the current image stream for a new one, moving our listener from the
    // old stream to the new and resetting per-stream state.
    // No-op when both streams share the same key (same underlying image source).
    if (_imageStream?.key == newStream?.key)
    {
        return;
    }

    // Detach the listener from the old stream before replacing it.
    if (_isListeningToStream)
    {
        _imageStream.removeListener(_getListener());
    }

    // Unless gapless playback is requested, drop the current frame so a stale
    // image is not shown while the new stream loads.
    if (!widget.gaplessPlayback)
    {
        setState(() => { _imageInfo = null; });
    }

    // Reset progress/frame bookkeeping for the incoming stream.
    setState(() =>
    {
        _loadingProgress = null;
        _frameNumber = 0;
        _wasSynchronouslyLoaded = false;
    });

    _imageStream = newStream;

    // Re-attach the listener to the replacement stream.
    if (_isListeningToStream)
    {
        _imageStream.addListener(_getListener());
    }
}
public void resetNodeAttributes(bool TypeofNode)
{
    // Re-initialize this node's simulation state.
    // TypeofNode: false = ordinary sensor node, true = base station.
    this.joinStatus = false;
    this.nodeDead = false; //the node is alive
    this.nodeType = TypeofNode;
    this.children = 0;
    this.nodeState = 0; // the node is in the receiving mode.

    // Previous/current parent MAC coordinates.
    // NOTE(review): -100 and 40000 appear to be "unset" sentinel values - confirm.
    this.old_p_mac_x = -100;
    this.old_p_mac_y = -100;
    this.p_mac_x = 40000;
    this.p_mac_y = 40000;

    // Traffic counters and message buffers start empty.
    this.MessageCycles = 0;
    this.totalBitsTransmitted = 0;
    this.totalBitsReceived = 0;
    this.childMacList = null;
    this.receivedApplicationMessages = null;
    this.receivedApplicationMessagesCount = 0;

    if (TypeofNode == false) //ordinary node
    {
        this.img = ImageStream.FromFile("unDecidedNode.bmp");
        this.battery.BATTERYPOWER = 0.5;
        this.nodeState = 0;
        this.hops = 10000; // effectively "infinite" hop count until the node joins
        this.PowerOfThePath = 0;
    }
    else //base station
    {
        this.img = ImageStream.FromFile("BaseStation.bmp");
        this.battery.BATTERYPOWER = 10000;
        this.nodeState = 1;
        this.hops = 0; // the base station is the routing root
        this.PowerOfThePath = 1000;
    }
}
public static void Run()
{
    // ExStart:GettingNotification
    // Folder containing the sample documents.
    string dataDir = RunExamples.GetDataDir_OMR();

    // Set up the OCR engine with the source image
    // (loaded from a file path; a MemoryStream would also work).
    OcrEngine ocrEngine = new OcrEngine();
    ocrEngine.Image = ImageStream.FromFile(dataDir + "Sampleocr.bmp");

    // Create a word notifier that prints each recognized word as it is produced.
    INotifier processorWord = NotifierFactory.WordNotifier();
    processorWord.Elapsed += delegate
    {
        Console.WriteLine(processorWord.Text);
    };

    // Register the notifier and run recognition.
    ocrEngine.AddNotifier(processorWord);
    ocrEngine.Process();
    // ExEnd:GettingNotification
}
public static void Run()
{
    // ExStart:OCROnMultipageTIFF
    // The path to the documents directory.
    string dataDir = RunExamples.GetDataDir_OCR() + "SampleTiff.tiff";

    // Create an initialize an instance of OcrEngine
    OcrEngine engine = new OcrEngine();

    // Set the OcrEngine.Image property by loading a multipage TIFF from disk, memory or URL
    engine.Image = ImageStream.FromFile(dataDir);

    // Set OcrEngine.ProcessAllPages to true in order to process all pages of TIFF in single run
    engine.ProcessAllPages = true;

    // Call OcrEngine.Process method to perform OCR operation
    if (engine.Process())
    {
        // Retrieve the list of Pages
        Page[] pages = engine.Pages;

        // Iterate over the list of Pages
        foreach (Page page in pages)
        {
            // Display the recognized text from each Page
            Console.WriteLine(page.PageText);
        }
    }
    // ExEnd:OCROnMultipageTIFF  (was mistakenly "ExStart", which breaks snippet extraction)
}
void SetCandy(Writer.IconTypes ic)
{
    // Load the icon bytes for the requested type and decode them into an image.
    currentIcon = new ImageStream();
    currentIcon.loc = new MemoryStream(db.GetIcon(ic));
    currentIcon.gif = Image.FromStream(currentIcon.loc);

    // Apply the freshly loaded image to every candy-corn picture control.
    foreach (var piece in candyCorn)
    {
        piece.Image = currentIcon.gif;
    }
}
void CleanControls()
{
    // Tear down the page UI: remove every dynamically added control and release
    // the image resources held by the icon wrappers.
    CleanComic();

    // Dispose the transient "trick" resource, if any.
    if (trick != null)
    {
        trick.Dispose();
        trick = null;
    }

    RemoveControl(mainPanel);

    // Remove every header link and candy-corn control.
    for (int i = 0; i < mspaHeaderLink.Count(); ++i)
        RemoveControl(mspaHeaderLink[i]);
    for (int i = 0; i < candyCorn.Count(); ++i)
        RemoveControl(candyCorn[i]);

    RemoveControl(headerPanel);
    RemoveControl(pageLoadingProgress);
    RemoveControl(progressLabel);

    // Dispose the icon image/stream pairs so their underlying handles are released.
    if (currentIcon != null)
    {
        currentIcon.gif.Dispose();
        currentIcon.loc.Dispose();
        currentIcon = null;
    }

    if (headerP != null)
    {
        headerP.gif.Dispose();
        headerP.loc.Dispose();
        headerP = null;
    }
}