// Handles the capture button: hides the window, lets the user select a screen
// region, decodes the captured pixels into a SoftwareBitmap, and runs OCR + translate.
// async void is acceptable here because this is a top-level event handler.
private async void OnCaptureButton_Click(Object sender, RoutedEventArgs e)
{
    // Hide this window so it is not included in the screen capture.
    Visibility = Visibility.Hidden;
    try
    {
        CaptureWindow captureWindow = new CaptureWindow();
        Boolean result = captureWindow.ShowDialog().Value;
        if (result)
        {
            // The region is captured. Run OCR and Translate
            MemoryStream ms;
            var bitmapSource = CopyScreen(captureWindow.Rect, out ms);
            captureImage.Source = bitmapSource;
            Visibility = Visibility.Visible;
            initialized = true;

            // Copy the encoded image bytes into a WinRT stream for BitmapDecoder.
            InMemoryRandomAccessStream inMemoryStream = new InMemoryRandomAccessStream();
            ms.Position = 0;
            Byte[] byteArray = ms.GetBuffer();
            // BUGFIX: GetBuffer() exposes the raw internal buffer, which can be larger
            // than the actual data — write only the first ms.Length bytes.
            inMemoryStream.AsStream().Write(byteArray, 0, (int)ms.Length);
            inMemoryStream.AsStream().Flush();

            Windows.Graphics.Imaging.BitmapDecoder bitmapDecoder = await Windows.Graphics.Imaging.BitmapDecoder.CreateAsync(inMemoryStream);
            SoftwareBitmap softwareBitmap = await bitmapDecoder.GetSoftwareBitmapAsync().AsTask().ConfigureAwait(true);
            captureImage.Tag = softwareBitmap; // for later use
            await RunOcrAndTranslate();
        }
    }
    catch (Exception ex)
    {
        // BUGFIX: was an empty catch — at least record the failure for diagnostics.
        System.Diagnostics.Debug.WriteLine("OnCaptureButton_Click failed: " + ex.Message);
    }
    // Always restore the window, even when capture was cancelled or failed.
    Visibility = Visibility.Visible;
}
/// <summary>
/// Encodes a WriteableBitmap's pixels to PNG or JPEG and returns the encoded bytes.
/// Returns null if encoding fails (the failure is logged).
/// </summary>
/// <param name="image">Source bitmap whose BGRA pixel buffer is encoded.</param>
/// <param name="format">Target format; PNG ignores <paramref name="quality"/>.</param>
/// <param name="quality">JPEG quality, 0–100 (mapped to the encoder's 0–1 scale).</param>
public async static Task<byte[]> ToByteArrayAsync(this WriteableBitmap image, Abstractions.ImageFormat format, int quality = 100)
{
    byte[] resultArray = null;

    using (IRandomAccessStream ms = new InMemoryRandomAccessStream())
    {
        try
        {
            // Read the raw BGRA pixels out of the bitmap's buffer.
            byte[] bytes;
            using (Stream stream = image.PixelBuffer.AsStream())
            {
                bytes = new byte[(uint)stream.Length];
                await stream.ReadAsync(bytes, 0, bytes.Length);
            }

            // JPEG quality property expects a fraction in [0, 1].
            BitmapPropertySet propertySet = new BitmapPropertySet();
            BitmapTypedValue qualityValue = new BitmapTypedValue(
                quality * .01,
                Windows.Foundation.PropertyType.Single
                );
            propertySet.Add("ImageQuality", qualityValue);

            BitmapEncoder encoder;
            if (format == Abstractions.ImageFormat.PNG)
            {
                encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, ms);
            }
            else
            {
                encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms, propertySet);
            }

            encoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore, (uint)image.PixelWidth, (uint)image.PixelHeight, 96, 96, bytes);
            await encoder.FlushAsync();

            // BUGFIX: after FlushAsync the stream position sits at the end of the
            // encoded data; rewind before reading, otherwise the result stays zeroed.
            ms.Seek(0);
            var encodedStream = ms.AsStream();
            resultArray = new byte[encodedStream.Length];
            await encodedStream.ReadAsync(resultArray, 0, resultArray.Length);
        }
        catch (Exception e)
        {
            System.Diagnostics.Debug.WriteLine(e.Message);
        }
    }

    return resultArray;
}
// Renders the given lines onto a bitmap and returns it as a JPEG stream.
// Returns Stream.Null when there is nothing to render.
public static Stream GetImageStream(IList <Line>?lines, Size imageSize, Color backgroundColor)
{
    if (lines == null)
    {
        return Stream.Null;
    }

    var image = GetImageInternal(lines, backgroundColor);
    if (image == null)
    {
        return Stream.Null;
    }

    using (image)
    {
        // Encode the rendered bitmap as JPEG into an in-memory WinRT stream,
        // then hand it back as a .NET stream rewound to the start.
        var backingStream = new InMemoryRandomAccessStream();
        image.SaveAsync(backingStream, CanvasBitmapFileFormat.Jpeg).GetAwaiter().GetResult();

        var result = backingStream.AsStream();
        result.Position = 0;
        return result;
    }
}
// Captures a photo from the camera, uploads it to Azure blob storage, and then
// navigates the WebView to a Google reverse-image search for the uploaded blob.
// async void is acceptable here because this is a top-level event handler.
private async void button_Click(object sender, RoutedEventArgs e)
{
    // Disable the UI while the capture/upload is in flight.
    button.IsEnabled = false; button2.IsEnabled = false; PB.Visibility = Visibility.Visible;
    using (var captureStream = new InMemoryRandomAccessStream())
    {
        await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);
        using (var s = captureStream.AsStream())
        {
            //await _mediaCapture.CapturePhotoToStorageFileAsync(ImageEncodingProperties.CreatePng(), file);
            // SECURITY NOTE(review): a storage account key is hard-coded in source.
            // It should be revoked and moved to configuration / a secret store.
            var credentials = new StorageCredentials("vrdreamer", "lTD5XmjEhvfUsC/vVTLsl01+8pJOlMdF/ri7W1cNOydXwSdb8KQpDbiveVciOqdIbuDu6gJW8g44YtVjuBzFkQ==");
            var client = new CloudBlobClient(new Uri("https://vrdreamer.blob.core.windows.net/"), credentials);
            var container = client.GetContainerReference("datasetimages");
            // Unique blob name per capture.
            var blockBlob = container.GetBlockBlobReference(Guid.NewGuid().ToString() + ".jpeg");
            // Rewind before uploading so the whole JPEG is sent.
            s.Position = 0;
            await blockBlob.UploadFromStreamAsync(s);
            ////await blockBlob.UploadFromFileAsync(captureStream);
            blobUrl = blockBlob.StorageUri.PrimaryUri.ToString();
            // Spoof a desktop browser user agent so Google serves the full results page.
            var add = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36";
            var httpRequestMessage = new Windows.Web.Http.HttpRequestMessage(Windows.Web.Http.HttpMethod.Get, new Uri("http://www.google.com/searchbyimage?site=search&sa=X&image_url=" + blockBlob.StorageUri.PrimaryUri.ToString()));
            httpRequestMessage.Headers.Add("User-Agent", add);
            //items2 = await Table2.ToCollectionAsync();
            web.NavigateWithHttpRequestMessage(httpRequestMessage);
            web.DOMContentLoaded += Web_DOMContentLoaded;
        }
    }
}
/// <summary>
/// Runs face detection on the supplied image stream. When faces are found, the
/// result carries the face count and a BMP stream cropped to the first face box.
/// </summary>
/// <param name="fileStream">Encoded image readable by BitmapDecoder.</param>
public async Task<RecogniseResult> Recognise(Stream fileStream)
{
    var randomAccessStream = fileStream.AsRandomAccessStream();
    var bitmapDecoder = await BitmapDecoder.CreateAsync(randomAccessStream);
    var rawBitmap = await bitmapDecoder.GetSoftwareBitmapAsync();

    // FaceDetector only accepts certain pixel formats; convert to the first supported one.
    var supportedBitmapFormats = FaceDetector.GetSupportedBitmapPixelFormats();
    var supportedFormatBitmap = SoftwareBitmap.Convert(rawBitmap, supportedBitmapFormats.First());

    var faceDetector = await FaceDetector.CreateAsync();
    var faces = await faceDetector.DetectFacesAsync(supportedFormatBitmap);

    var result = new RecogniseResult();
    if (faces.Any())
    {
        result.Faces = faces.Count();

        // Re-encode the original bitmap cropped to the first detected face box.
        var memoryStream = new InMemoryRandomAccessStream();
        var bitmapEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, memoryStream);
        bitmapEncoder.SetSoftwareBitmap(rawBitmap);
        bitmapEncoder.BitmapTransform.Bounds = faces.First().FaceBox;
        await bitmapEncoder.FlushAsync();

        // BUGFIX: FlushAsync leaves the position at the end of the encoded data;
        // rewind so consumers of FirstFace read the image from the beginning.
        memoryStream.Seek(0);
        result.FirstFace = memoryStream.AsStream();
    }
    return result;
}
/// <summary>
/// Renders the chart grid UIElement to a PNG and returns it base64-encoded.
/// </summary>
private async Task<string> RenderGridAsync()
{
    // renders UIElement into a bitmap
    var rtb = new RenderTargetBitmap();
    await rtb.RenderAsync(this.ChartGrid);

    // get the pixels from the rendered UI elements
    var pixelBuffer = await rtb.GetPixelsAsync();
    var pixels = pixelBuffer.ToArray();

    // Encode the pixels into a png
    var randomAccessStream = new InMemoryRandomAccessStream();
    var bitmapEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, randomAccessStream);
    // BUGFIX: width and height were swapped (PixelHeight was passed as the width),
    // which corrupts or distorts the encoded image for any non-square grid.
    bitmapEncoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Straight, (uint)rtb.PixelWidth, (uint)rtb.PixelHeight, 96, 96, pixels);
    await bitmapEncoder.FlushAsync();

    // Rewind and read the encoded PNG back out.
    randomAccessStream.Seek(0);
    var encodedImageBytes = new byte[randomAccessStream.Size];
    await randomAccessStream.AsStream().ReadAsync(encodedImageBytes, 0, encodedImageBytes.Length);

    // convert the png byte[] into base64
    return Convert.ToBase64String(encodedImageBytes);
}
/// <summary>
/// Uploads the frame to the Face API and hands the detection result to CheckPersons.
/// Drops the frame when a previous detection is still in progress.
/// </summary>
public async void DetectFaces(SoftwareBitmap bitmap)
{
    // Nothing to do, or a previous frame is still being processed.
    if (bitmap == null || _processingFace)
    {
        return;
    }

    _processingFace = true;
    try
    {
        using (var jpegStream = new InMemoryRandomAccessStream())
        {
            // JPEG-encode the frame into memory and rewind it for upload.
            var jpegEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, jpegStream);
            jpegEncoder.SetSoftwareBitmap(bitmap);
            await jpegEncoder.FlushAsync();
            jpegStream.Seek(0);

            var requestedAttributes = new FaceAttributeType[]
            {
                FaceAttributeType.Gender,
                FaceAttributeType.Age,
                FaceAttributeType.Smile,
                FaceAttributeType.FacialHair,
                FaceAttributeType.Glasses
            };
            Face[] detectedfaces = await _faceServiceClient.DetectAsync(jpegStream.AsStream(), true, false, requestedAttributes);
            CheckPersons(detectedfaces, bitmap);
        }
    }
    catch (Exception ex)
    {
        // Debug.WriteLine("FaceAPIException HttpStatus: " + ex.HttpStatus + ", ErrorCode : " + ex.ErrorCode + ", ErrorMessage: " + ex.ErrorMessage);
        Debug.WriteLine("DetectFaces exception : " + ex.Message);
        ProcessResults(null, null, null);
    }
}
/// <summary>
/// Captures the current camera frame as a JPEG and returns it as a stream
/// rewound to the start, or null when not streaming / busy / on failure.
/// The caller owns (and must dispose) the returned stream.
/// </summary>
public async Task<Stream> GetCurrentFrameAsync()
{
    if (CurrentState != ScenarioState.Streaming)
    {
        return null;
    }

    // If a lock is being held it means we're still waiting for processing work on the previous frame to complete.
    // In this situation, don't wait on the semaphore but exit immediately.
    if (!frameProcessingSemaphore.Wait(0))
    {
        return null;
    }

    InMemoryRandomAccessStream stream = null;
    try
    {
        stream = new InMemoryRandomAccessStream();
        await mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), stream);
        stream.Seek(0);
        return stream.AsStream();
    }
    catch (Exception ex)
    {
        // BUGFIX: previously the stream leaked when the capture threw, and the
        // failure was silently swallowed.
        stream?.Dispose();
        System.Diagnostics.Debug.WriteLine("GetCurrentFrameAsync failed: " + ex.Message);
    }
    finally
    {
        frameProcessingSemaphore.Release();
    }
    return null;
}
// Runs the WinRT OCR engine over the given GDI+ bitmap and returns the recognized
// text: concatenated when combineLines is true, otherwise one line per OCR line.
public async Task <string> ExtractTextAsync(Bitmap bitmap, bool combineLines)
{
    var builder = new StringBuilder();

    // Serialize the bitmap as BMP into a WinRT stream for the decoder.
    using var raStream = new InMemoryRandomAccessStream();
    bitmap.Save(raStream.AsStream(), ImageFormat.Bmp);

    var decoder = await BitmapDecoder.CreateAsync(raStream);
    using var softwareBitmap = await decoder.GetSoftwareBitmapAsync();

    var ocrResult = await _winRtOCREngine.RecognizeAsync(softwareBitmap);
    foreach (var line in ocrResult.Lines)
    {
        var cleaned = Replace(line.Text);
        if (combineLines)
        {
            builder.Append(cleaned);
        }
        else
        {
            builder.AppendLine(cleaned);
        }
    }

    return builder.ToString();
}
/// <summary>
/// Decodes the supplied encoded bitmap data into an array of pixels.
/// For public use only.
/// </summary>
/// <param name="encodedBytes">JPEG-encoded image bytes.</param>
/// <returns>The decoded raw pixel bytes.</returns>
public async Task <byte[]> DecodeAsync(byte[] encodedBytes)
{
#if NETFX_CORE
    using (var ras = new InMemoryRandomAccessStream())
    {
        var facade = ras.AsStream();
        await facade.WriteAsync(encodedBytes, 0, encodedBytes.Length);
        // BUGFIX: the .NET-to-WinRT stream facade buffers writes internally;
        // flush it so all bytes reach the underlying stream before decoding.
        await facade.FlushAsync();
        ras.Seek(0);
        var dec = await BitmapDecoder.CreateAsync(BitmapDecoder.JpegDecoderId, ras);
        var pixelDataProvider = await dec.GetPixelDataAsync();
        return pixelDataProvider.DetachPixelData();
    }
#else
    using (var str = new MemoryStream())
    {
        // Decode via WPF's JpegBitmapDecoder and copy the first frame's pixels out.
        str.Write(encodedBytes, 0, encodedBytes.Length);
        str.Position = 0;
        var dec = new JpegBitmapDecoder(str, BitmapCreateOptions.PreservePixelFormat, BitmapCacheOption.OnLoad);
        var frame = dec.Frames[0];
        // Remember the decoded format so callers can interpret the pixel bytes.
        this.PixelFormat = frame.Format;
        var bpp = frame.Format.BitsPerPixel / 8;
        var stride = bpp * frame.PixelWidth;
        var size = stride * frame.PixelHeight;
        var output = new byte[size];
        frame.CopyPixels(output, stride, 0);
        return await Task.FromResult(output);
    }
#endif
}
// JPEG-encodes the given bitmap and returns it as a base64 string.
// A null bitmap yields an empty string.
private async Task <String> Base64Image(WriteableBitmap image)
{
    if (image == null)
    {
        return "";
    }

    using (var encodedStream = new InMemoryRandomAccessStream())
    {
        // Pull the raw BGRA pixels out of the bitmap's buffer.
        Stream pixelStream = image.PixelBuffer.AsStream();
        byte[] pixels = new byte[pixelStream.Length];
        await pixelStream.ReadAsync(pixels, 0, pixels.Length);

        // JPEG-encode the pixels at 96 DPI, then rewind the stream for reading.
        var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, encodedStream);
        encoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore, (uint)image.PixelWidth, (uint)image.PixelHeight, 96.0, 96.0, pixels);
        await encoder.FlushAsync();
        encodedStream.Seek(0);

        return GetBase64EncodedString(encodedStream.AsStream());
    }
}
// Share-contract handler: advertises a bitmap that is downloaded lazily via a
// deferred data provider only when the share target actually requests it.
void manager_DataRequested(DataTransferManager sender, DataRequestedEventArgs args)
{
    // do the basics...
    var data = args.Request.Data;
    data.Properties.Title = "Deferred image";
    data.Properties.Description = "I'll have to be downloaded first!";

    // get a deferral...
    data.SetDataProvider(StandardDataFormats.Bitmap, async(request) =>
    {
        var deferral = request.GetDeferral();
        try
        {
            // download...
            using (var inStream = await new HttpClient().GetStreamAsync("http://streetfoo.apphb.com/images/graffiti00.jpg"))
            {
                // copy the stream... but we'll need to obtain a facade
                // to map between WinRT and .NET...
                var outStream = new InMemoryRandomAccessStream();
                var outFacade = outStream.AsStream();
                inStream.CopyTo(outFacade);
                // BUGFIX: the facade buffers writes — flush it and rewind the WinRT
                // stream so the share target sees the complete image from the start.
                outFacade.Flush();
                outStream.Seek(0);

                // send that...
                var reference = RandomAccessStreamReference.CreateFromStream(outStream);
                request.SetData(reference);
            }
        }
        finally
        {
            deferral.Complete();
        }
    });
}
/// <summary>
/// Take a new photo and hold it in the output stream.
/// </summary>
/// <param name="parameters">The set of parameters used to take the photo, <see cref="ICameraResolution"/> class.</param>
/// <param name="format">The image format. <see cref="PhotoCaptureFormat"/> enum. </param>
/// <returns>The <see cref="PhotoCaptureStreamResult"/> struct.</returns>
public async Task <PhotoCaptureStreamResult> TakePhotoToStreamAsync(ICameraResolution parameters, PhotoCaptureFormat format)
{
    if (!this.isInitialized)
    {
        Debug.WriteLine("First you need to initialize the videocapturemanager.");
        return new PhotoCaptureStreamResult();
    }

    try
    {
        if (parameters != null)
        {
            Debug.WriteLine("Applying paramenters...");
            await this.mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.Photo, (parameters as StreamResolution).EncodingProperties);
        }

        Debug.WriteLine("Taking photo...");
        var stream = new InMemoryRandomAccessStream();
        var properties = this.GetImageEncodingProperties(format);
        await this.mediaCapture.CapturePhotoToStreamAsync(properties, stream);
        // BUGFIX: the capture leaves the stream positioned at the end of the data;
        // rewind it so the caller reads the photo from the beginning.
        stream.Seek(0);
        Debug.WriteLine("Photo stream loaded.");
        return new PhotoCaptureStreamResult(true, stream.AsStream());
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Exception when taking a photo: " + ex.ToString());
        return new PhotoCaptureStreamResult(false, Stream.Null);
    }
}
// Uploads the current image to Azure blob storage as a timestamped JPEG and
// queues a JSON message describing the blob and its corner points.
// async void is acceptable here because this is a top-level event handler.
private async void Button_Click_1(object sender, RoutedEventArgs e)
{
    try
    {
        ProgressRingControl.IsActive = true;

        // Connect to storage and ensure the target container exists.
        CloudStorageAccount LCloudStorageAccount = CloudStorageAccount.Parse(ConnectionControl.Text);
        CloudBlobClient LBlobClient = LCloudStorageAccount.CreateCloudBlobClient();
        CloudBlobContainer LBlobContainer = LBlobClient.GetContainerReference(ContainerControl.Text);
        await LBlobContainer.CreateIfNotExistsAsync();

        CloudBlockBlob LBlockBlob = LBlobContainer.GetBlockBlobReference(DateTime.UtcNow.ToString("yyyy-MM-dd-HH-mm-ss-fff") + ".jpg");
        // BUGFIX: dispose the stream, and rewind it after encoding — otherwise the
        // upload starts at the end of the stream and produces an empty blob.
        using (var lstream = new InMemoryRandomAccessStream())
        {
            await bitmapImageExMod.ToStreamAsJpeg(lstream);
            lstream.Seek(0);
            await LBlockBlob.UploadFromStreamAsync(lstream.AsStream());
        }

        // Ensure the queue exists and enqueue the image metadata as JSON.
        CloudQueueClient LQueueClient = LCloudStorageAccount.CreateCloudQueueClient();
        CloudQueue LQueue = LQueueClient.GetQueueReference(QueueControl.Text);
        await LQueue.CreateIfNotExistsAsync();

        ImageItem LImageItem = new ImageItem
        {
            Path = LBlockBlob.Uri.ToString(),
            TopLeft = pointArray[0],
            TopRight = pointArray[1],
            BottomLeft = pointArray[2],
            BottomRight = pointArray[3]
        };
        string jsonString = JsonConvert.SerializeObject(LImageItem);
        CloudQueueMessage LQueueMessage = new CloudQueueMessage(jsonString);
        await LQueue.AddMessageAsync(LQueueMessage);
    }
    finally
    {
        ProgressRingControl.IsActive = false;
    }
}
/// <summary>
/// JPEG-encodes MainPage.oldImg and returns it base64-encoded.
/// Returns an empty string when there is no image or encoding fails.
/// </summary>
public static async Task<string> SendImage()
{
    try
    {
        if (MainPage.oldImg == null)
        {
            return "";
        }

        // using-blocks replace the manual Dispose calls, which previously leaked
        // both streams whenever an exception occurred mid-way.
        using (var stream = new InMemoryRandomAccessStream())
        using (var ms = new MemoryStream())
        {
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
            encoder.SetSoftwareBitmap(MainPage.oldImg);
            await encoder.FlushAsync();

            // BUGFIX: rewind before copying — FlushAsync leaves the position at the
            // end of the encoded data, so the base64 result would otherwise be empty.
            stream.Seek(0);
            stream.AsStream().CopyTo(ms);
            return Convert.ToBase64String(ms.ToArray());
        }
    }
    catch (Exception e)
    {
        Debug.WriteLine(e.Message);
        return "";
    }
}
/// <summary>
/// Get image stream from points
/// </summary>
/// <param name="points">Drawing points</param>
/// <param name="imageSize">Image size</param>
/// <param name="lineWidth">Line Width</param>
/// <param name="strokeColor">Line color</param>
/// <param name="backgroundColor">Image background color</param>
/// <returns>Image stream (JPEG), or Stream.Null when there is nothing to draw</returns>
public static Stream GetImageStream(IList <Point> points, Size imageSize, float lineWidth, Color strokeColor, Color backgroundColor)
{
    // A drawable path needs at least two points.
    if (points == null || points.Count < 2)
    {
        return Stream.Null;
    }

    var image = GetImageInternal(points, lineWidth, strokeColor, backgroundColor);
    if (image == null)
    {
        return Stream.Null;
    }

    using (image)
    {
        // Encode the rendered bitmap as JPEG into an in-memory WinRT stream,
        // then hand it back as a .NET stream rewound to the start.
        var backingStream = new InMemoryRandomAccessStream();
        image.SaveAsync(backingStream, CanvasBitmapFileFormat.Jpeg).GetAwaiter().GetResult();

        var result = backingStream.AsStream();
        result.Position = 0;
        return result;
    }
}
// Timer callback: captures a frame and sends it to the Emotion service.
// Skips the tick entirely when the previous analysis is still running.
// async void is acceptable here because this is a timer callback entry point.
public async void AnalyzeEmotion(ThreadPoolTimer timer)
{
    if (!EmotionProcessingSemaphore.Wait(0))
    {
        return;
    }

    try
    {
        using (var captureStream = new InMemoryRandomAccessStream())
        {
            await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);
            captureStream.Seek(0);
            try
            {
                emotion = await EmotionServiceClient.RecognizeAsync(captureStream.AsStream());
                System.Diagnostics.Debug.WriteLine(DateTime.Now);
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                System.Diagnostics.Debug.WriteLine(f.ErrorMessage, f.ErrorCode);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine(e.Data);
            }
        }
    }
    finally
    {
        // BUGFIX: release in finally — previously an exception thrown by the
        // capture call (outside the inner try) left the semaphore held forever,
        // permanently blocking every subsequent timer tick.
        EmotionProcessingSemaphore.Release();
    }
}
// Renders a parking-lot load donut off-screen and returns it as a BMP
// stream reference suitable for use as a map icon.
private async Task <IRandomAccessStreamReference> GetMapIconDonutImage(Grid drawingContainer, ParkingLot lot)
{
    // Build the donut control so it can be rendered to a bitmap.
    var donut = new ParkingLotLoadDonut
    {
        Animate = false,
        ParkingLot = lot,
        Style = Application.Current.Resources["ParkingLotMapIconDonutStyle"] as Style
    };

    // The control must be in the visual tree to render; remove it right after.
    drawingContainer.Children.Add(donut);
    var renderBitmap = new RenderTargetBitmap();
    await renderBitmap.RenderAsync(donut);
    drawingContainer.Children.Remove(donut);

    //TODO: take care of possible scaling issues
    var pixelBytes = (await renderBitmap.GetPixelsAsync()).ToArray();

    // Encode the pixels as BMP into an in-memory stream and rewind it.
    var bmpStream = new InMemoryRandomAccessStream();
    var bmpEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, bmpStream);
    bmpEncoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Straight, (uint)renderBitmap.PixelWidth, (uint)renderBitmap.PixelHeight, 96, 96, pixelBytes);
    await bmpEncoder.FlushAsync();
    bmpStream.Seek(0);

    return RandomAccessStreamReference.CreateFromStream(bmpStream.AsStream().AsRandomAccessStream());
}
// Renders the signature into an off-screen target and returns the encoded
// image as a stream rewound to the start; null for unsupported formats.
private async Task <Stream> GetImageStreamInternal(SignatureImageFormat format, Size scale, Rect signatureBounds, Size imageSize, float strokeWidth, Color strokeColor, Color backgroundColor)
{
    // Map the requested signature format onto a Win2D file format.
    CanvasBitmapFileFormat fileFormat;
    switch (format)
    {
        case SignatureImageFormat.Jpeg:
            fileFormat = CanvasBitmapFileFormat.Jpeg;
            break;
        case SignatureImageFormat.Png:
            fileFormat = CanvasBitmapFileFormat.Png;
            break;
        default:
            return null;
    }

    using (var offscreen = GetRenderTarget(scale, signatureBounds, imageSize, strokeWidth, strokeColor, backgroundColor))
    {
        var backingStream = new InMemoryRandomAccessStream();
        await offscreen.SaveAsync(backingStream, fileFormat);

        var result = backingStream.AsStream();
        result.Position = 0;
        return result;
    }
}
/// <summary>
/// Draws the configured text lines onto a Win2D render target and returns the
/// result as a PNG held in a MemoryStream.
/// </summary>
private async Task<MemoryStream> DrawText()
{
    using (var device = new CanvasDevice())
    {
        using (var renderTarget = new CanvasRenderTarget(device, ImageWidth, ImageHeight, ImageDPI))
        {
            // Paint the background and the text lines.
            using (var drawingSession = renderTarget.CreateDrawingSession())
            {
                drawingSession.Clear(BackgroundColor);
                using (var format = new CanvasTextFormat())
                {
                    TextLines(drawingSession, format);
                }
            }

            using (var stream = new InMemoryRandomAccessStream())
            {
                await renderTarget.SaveAsync(stream, CanvasBitmapFileFormat.Png, 1f);
                // BUGFIX: rewind before copying — SaveAsync leaves the position at
                // the end, so the copy would otherwise produce an empty stream.
                stream.Seek(0);
                var memoryStream = new MemoryStream();
                await stream.AsStream().CopyToAsync(memoryStream);
                return memoryStream;
            }
        }
    }
}
// Inverts the clicked image via a Win2D InvertEffect, then stores both the
// re-encoded JPEG bytes and a displayable BitmapImage back into the collection.
// async void is acceptable here because this is a top-level event handler.
private async void InvertImage(object sender, RoutedEventArgs e)
{
    CanvasDevice device = CanvasDevice.GetSharedDevice();
    CanvasRenderTarget target = new CanvasRenderTarget(device, _clickData.Image.PixelWidth, _clickData.Image.PixelHeight, 96);

    // Draw the inverted source image into the off-screen target.
    using (CanvasDrawingSession session = target.CreateDrawingSession())
    using (var stream = new MemoryStream(_clickData.Data).AsRandomAccessStream())
    {
        session.Clear(Colors.Transparent);
        session.DrawImage(new InvertEffect
        {
            Source = await CanvasBitmap.LoadAsync(device, stream) as ICanvasImage
        });
    }

    using (var stream = new InMemoryRandomAccessStream())
    {
        await target.SaveAsync(stream, CanvasBitmapFileFormat.Jpeg);
        // BUGFIX: rewind before reading the encoded JPEG back — SaveAsync leaves
        // the position at the end, so the byte array would otherwise stay zeroed.
        stream.Seek(0);
        var bytes = new byte[stream.Size];
        await stream.AsStream().ReadAsync(bytes, 0, bytes.Length);

        // Rewind again so the BitmapImage decodes from the start.
        var bitmap = new BitmapImage();
        stream.Seek(0);
        await bitmap.SetSourceAsync(stream);

        Images[Images.IndexOf(_clickData)].Data = bytes;
        Images[Images.IndexOf(_clickData)].Image = bitmap;
    }
}
/// <summary>
/// Retrieves the signature image from the canvas
/// </summary>
/// <param name="imgFormat">Encoder to use: PNG or JPEG.</param>
/// <returns>A stream positioned at the start of the encoded image.</returns>
public Stream GetImage(ImageFormatType imgFormat)
{
    var imageTask = Task.Run(async() =>
    {
        //Create new Random Access Tream
        var stream = new InMemoryRandomAccessStream();

        //Create bitmap encoder
        var encoder = await BitmapEncoder.CreateAsync(
            imgFormat == ImageFormatType.Png ? BitmapEncoder.PngEncoderId : BitmapEncoder.JpegEncoderId,
            stream);

        //Set the pixel data and flush it
        encoder.SetPixelData(
            BitmapPixelFormat.Bgra8,
            BitmapAlphaMode.Straight,
            (uint)bitmapInfo.PixelWidth,
            (uint)bitmapInfo.PixelHeight,
            96d,
            96d,
            bitmapInfo.BitmapBuffer.ToArray());
        await encoder.FlushAsync();

        // BUGFIX: rewind so the caller reads the image from the beginning rather
        // than from the end-of-stream position left by FlushAsync.
        stream.Seek(0);
        return stream.AsStream();
    });

    imageTask.Wait();
    return imageTask.Result;
}
// Decodes an encoded image stream into a 32bpp ARGB GDI+ Bitmap, honoring the
// EXIF orientation (OrientedPixelWidth/Height). Blocks until decoding completes.
internal static Bitmap Create(Stream stream)
{
    Bitmap bitmap = null;

    Task.Run(async() =>
    {
        using (var raStream = new InMemoryRandomAccessStream())
        {
            // Copy the source into a WinRT stream for BitmapDecoder.
            var facade = raStream.AsStream();
            await stream.CopyToAsync(facade);
            // BUGFIX: the .NET-to-WinRT facade buffers writes internally; flush it
            // so the decoder sees all of the copied bytes.
            await facade.FlushAsync();

            var decoder = await BitmapDecoder.CreateAsync(raStream);
            var pixelData = await decoder.GetPixelDataAsync();
            var width = (int)decoder.OrientedPixelWidth;
            var height = (int)decoder.OrientedPixelHeight;
            const PixelFormat format = PixelFormat.Format32bppArgb;
            var bytes = pixelData.DetachPixelData();

            // Copy the decoded pixel bytes into a locked GDI+ bitmap.
            bitmap = new Bitmap(width, height, format);
            var data = bitmap.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadWrite, format);
            Marshal.Copy(bytes, 0, data.Scan0, bytes.Length);
            bitmap.UnlockBits(data);
        }
    }).Wait();

    return bitmap;
}
// Captures a JPEG photo, shows it in the supplied Image control, and returns
// the photo stream rewound to the start (null on failure).
private async Task <Stream> TakePicture(Image captureImage)
{
    Debug.WriteLine("In TakePicture");
    IRandomAccessStream photoStream = null;
    try
    {
        // Capture a JPEG frame from the camera into an in-memory stream.
        photoStream = new InMemoryRandomAccessStream();
        await camera.MyMediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), photoStream);

        // Show the captured photo in the preview control...
        photoStream.Seek(0L);
        var preview = new BitmapImage();
        preview.SetSource(photoStream);
        captureImage.Source = preview;

        // ...and rewind again so the caller can read the JPEG bytes.
        photoStream.Seek(0L);
    }
    catch (Exception ex)
    {
        Debug.WriteLine($"Exception in TakePicture: {ex.Message}");
    }
    return photoStream?.AsStream();
}
/// <summary>
/// JPEG-encodes the captured frame and returns it as a MemoryStream rewound
/// to the start. Encoder failures are logged and yield an empty stream.
/// </summary>
private async Task<Stream> WriteToStreamAsync(CapturedFrame frame)
{
    using (var outputStream = new InMemoryRandomAccessStream())
    {
        var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, outputStream);
        encoder.IsThumbnailGenerated = false;
        encoder.SetSoftwareBitmap(frame.SoftwareBitmap);
        try
        {
            await encoder.FlushAsync();
        }
        catch (Exception e)
        {
            // Best-effort: a failed flush still returns an (empty) stream.
            Debug.WriteLine(e.Message);
        }
        await outputStream.FlushAsync();

        // BUGFIX: rewind before copying — after the flush the position is at the
        // end of the encoded data, so the copy would otherwise be empty.
        outputStream.Seek(0);
        var ms = new MemoryStream();
        await outputStream.AsStream().CopyToAsync(ms);
        ms.Position = 0;
        return ms;
    }
}
/// <summary>
/// Converts the RichEditBox's RTF content to HTML and previews it in a WebView.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void ToHtmlButton_Click(object sender, RoutedEventArgs e)
{
    using (var memory = new InMemoryRandomAccessStream())
    {
        // Serialize the RichEditBox content as RTF into an in-memory stream.
        RichEditBoxTest.Document.SaveToStream(Windows.UI.Text.TextGetOptions.FormatRtf, memory);
        var streamToSave = memory.AsStream();

        // Register code-page encodings so the RTF reader can handle legacy charsets.
        Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
        TextReader textReader = new StreamReader(streamToSave, Encoding.UTF8);

        // Convert RTF to HTML using the RtfPipe library.
        var libHtml = Rtf.ToHtml(new RtfSource(textReader));
        libHtml = RtfToHtmlConverter.ParseHtmlText(libHtml);

        // Show the generated HTML source code.
        HtmlCodeViewer.Text = libHtml;

        var storageFolder = ApplicationData.Current.LocalFolder;
        // WebView navigation does not work directly under LocalFolder,
        // so create a subfolder to hold the generated page.
        var testFolder = await storageFolder.CreateFolderAsync("TestFolder", CreationCollisionOption.ReplaceExisting);
        var testHtmlFile = await testFolder.CreateFileAsync("rtftohtmltestpage.html", CreationCollisionOption.ReplaceExisting);
        await FileIO.WriteTextAsync(testHtmlFile, libHtml);
        RtfToHtmlViewer.Navigate(new Uri("ms-appdata:///local/TestFolder/rtftohtmltestpage.html"));
    }

    // Previously hand-made converter (kept for reference):
    //RichEditBoxTest.Document.GetText(Windows.UI.Text.TextGetOptions.FormatRtf, out string fvff);
    //string htmlcode = await RtfToHtmlConverter.ParseRtfText(fvff);
}
/// <summary>
/// Reads the RichEditBox content as RTF bytes and shows the raw RTF text.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void ToRtfButton_Click(object sender, RoutedEventArgs e)
{
    // Fetch the document content in RTF form.
    byte[] bytes;
    using (var memory = new InMemoryRandomAccessStream())
    {
        RichEditBoxTest.Document.SaveToStream(Windows.UI.Text.TextGetOptions.FormatRtf, memory);
        // BUGFIX: rewind before reading — after SaveToStream the position sits at
        // the end of the written data, so the read would otherwise come up empty.
        memory.Seek(0);
        var streamToSave = memory.AsStream();
        var dataReader = new DataReader(streamToSave.AsInputStream());
        bytes = new byte[streamToSave.Length];
        await dataReader.LoadAsync((uint)streamToSave.Length);
        dataReader.ReadBytes(bytes);
    }

    string result = System.Text.Encoding.UTF8.GetString(bytes);
    RawTextBlock.Text = result;

    // Copy to clipboard (disabled):
    // var data = new DataPackage();
    // data.SetText(result);
    // Clipboard.SetContent(data);
}
// Starts the hub connection and camera, then reacts to motion events by
// capturing a photo, uploading it, and identifying the person to decide whether
// to open the door or notify the owner.
private async void Run()
{
    await _HubConnection.Start();

    var cam = new MediaCapture();
    await cam.InitializeAsync(new MediaCaptureInitializationSettings()
    {
        MediaCategory = MediaCategory.Media,
        StreamingCaptureMode = StreamingCaptureMode.Video
    });

    _Sensor.MotionDetected += async(int pinNum) =>
    {
        var stream = new InMemoryRandomAccessStream();
        Stream imageStream = null;
        try
        {
            // BUGFIX: this was Task.Factory.StartNew(async () => ...), which returns
            // Task<Task>; awaiting it only waited for the delegate to START, so the
            // finally block disposed the streams while the capture/upload was still
            // running. Task.Run unwraps the inner task and awaits its completion.
            await Task.Run(async() =>
            {
                // Suspend the sensor while we process this motion event.
                _Sensor.IsActive = false;
                await cam.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), stream);
                stream.Seek(0);
                imageStream = stream.AsStream();
                imageStream.Seek(0, SeekOrigin.Begin);

                string imageUrl = await NotificationHelper.UploadImageAsync(imageStream);
                switch (await OxfordHelper.IdentifyAsync(imageUrl))
                {
                case AuthenticationResult.IsOwner:
                    // open the door
                    MotorController.PWM(26);
                    break;

                case AuthenticationResult.Unkown:
                    // send notification to the owner
                    NotificationHelper.NotifyOwnerAsync(imageUrl);
                    break;

                case AuthenticationResult.None:
                default:
                    break;
                }
                _Sensor.IsActive = true;
            });
        }
        finally
        {
            if (stream != null)
            {
                stream.Dispose();
            }
            if (imageStream != null)
            {
                imageStream.Dispose();
            }
        }
    };
}
// Resizes the photo to the given height (via the stream-based overload) and
// returns the result as a .NET stream. The caller owns the returned stream.
internal static async Task<Stream> ResizePhoto(Stream photo, int height)
{
    InMemoryRandomAccessStream result = new InMemoryRandomAccessStream();
    await ResizePhoto(photo, height, result);
    // BUGFIX: rewind so callers read the resized image from the beginning
    // (harmless if the overload already rewound the stream).
    result.Seek(0);
    return result.AsStream();
}
// Frame pump: runs until _isStopping is set, repeatedly grabbing the latest
// camera frame, JPEG-encoding it rotated 180 degrees, and publishing the bytes
// via the Frame property. Blocking .Wait() calls are used throughout; this
// method is presumably invoked on its own dedicated thread — TODO confirm.
private void ProcessFrames()
{
    _isStopped = false;
    while (!_isStopping)
    {
        try
        {
            GarbageCollectorCanWorkHere();
            var frame = _mediaFrameReader.TryAcquireLatestFrame();
            // Skip ticks where no new frame (or no software bitmap) is available.
            if (frame == null || frame.VideoMediaFrame == null || frame.VideoMediaFrame.SoftwareBitmap == null)
            {
                continue;
            }
            using (var stream = new InMemoryRandomAccessStream())
            {
                // Normalize the pixel format so the JPEG encoder accepts it.
                using (var bitmap = SoftwareBitmap.Convert(frame.VideoMediaFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore))
                {
                    var imageTask = BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream, _imageQuality).AsTask();
                    imageTask.Wait();
                    var encoder = imageTask.Result;
                    encoder.SetSoftwareBitmap(bitmap);

                    //Rotate image 180 degrees
                    var transform = encoder.BitmapTransform;
                    transform.Rotation = BitmapRotation.Clockwise180Degrees;

                    var flushTask = encoder.FlushAsync().AsTask();
                    flushTask.Wait();
                    using (var asStream = stream.AsStream())
                    {
                        // Rewind and copy the encoded JPEG into a managed byte array.
                        asStream.Position = 0;
                        var image = new byte[asStream.Length];
                        asStream.Read(image, 0, image.Length);
                        // Publish the encoded frame for consumers.
                        Frame = image;
                        encoder = null;
                    }
                }
            }
        }
        catch (Exception exception)
        {
            // Log and keep pumping; one bad frame should not stop the loop.
            Logger.Write(nameof(Camera), exception).Wait();
        }
    }
    _isStopped = true;
}