/// <summary>
/// Gets the emoji JSON.
/// </summary>
/// <returns>JSON</returns>
public static async Task<string> GetEmojis()
{
    using (HttpClient client = new HttpClient())
    {
        try
        {
            HttpResponseMessage response = await client.GetAsync(new Uri(EmojiLink));
            if (null != response && response.StatusCode == HttpStatusCode.Ok)
            {
                using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
                {
                    await response.Content.WriteToStreamAsync(stream);
                    stream.Seek(0);

                    Windows.Storage.Streams.Buffer buffer = new Windows.Storage.Streams.Buffer((uint)stream.Size);
                    await stream.ReadAsync(buffer, (uint)stream.Size, Windows.Storage.Streams.InputStreamOptions.Partial);

                    using (DataReader reader = DataReader.FromBuffer(buffer))
                    {
                        return reader.ReadString((uint)stream.Size);
                    }
                }
            }
        }
        catch (Exception e)
        {
            Debug.WriteLine(e.Message);
            return string.Empty;
        }

        return string.Empty;
    }
}
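// Illustrative caller for the helper above (hedged sketch): "EmojiHelper" is an assumed name for the
// class that hosts GetEmojis(), and the shape of the JSON depends on whatever EmojiLink actually serves.
string emojiJson = await EmojiHelper.GetEmojis();
if (string.IsNullOrEmpty(emojiJson))
{
    // An empty string signals a failed download or a non-OK status code.
    Debug.WriteLine("Emoji download failed; falling back to an empty list.");
}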
private async Task<MemoryStream> GetImageAsStream(SoftwareBitmap softwareBitmap, Guid guid)
{
    MemoryStream theStream;
    byte[] array = null;

    var memoryStream = new InMemoryRandomAccessStream();

    // Get a way to get a hold of the image bits
    var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, memoryStream);
    encoder.SetSoftwareBitmap(softwareBitmap);

    try
    {
        await encoder.FlushAsync();
    }
    catch (Exception)
    {
        return new MemoryStream();
    }

    // Make the data array large enough to hold on to the bits
    array = new byte[memoryStream.Size];

    // Copy all the bits to the array
    await memoryStream.ReadAsync(array.AsBuffer(), (uint)memoryStream.Size, InputStreamOptions.None);

    // Create the stream using the bits in the array
    theStream = new MemoryStream(array);
    return theStream;
}
private static async Task<string> GetBase64OfBitmap(SoftwareBitmap sourceImage)
{
    byte[] bytes = new byte[0];

    using (var randomAccessStream = new InMemoryRandomAccessStream())
    {
        var encoder = await BitmapEncoder.CreateAsync(
            BitmapEncoder.PngEncoderId,
            randomAccessStream);

        SoftwareBitmap bitmap = new SoftwareBitmap(
            sourceImage.BitmapPixelFormat,
            sourceImage.PixelWidth,
            sourceImage.PixelHeight,
            sourceImage.BitmapAlphaMode);
        sourceImage.CopyTo(bitmap);

        encoder.SetSoftwareBitmap(bitmap);
        await encoder.FlushAsync();

        bytes = new byte[randomAccessStream.Size];
        await randomAccessStream.ReadAsync(
            bytes.AsBuffer(),
            (uint)bytes.Length,
            InputStreamOptions.None);
    }

    var base64String = Convert.ToBase64String(bytes);
    return base64String;
}
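// Illustrative usage of the helper above (hedged sketch): sourceBitmap is assumed to exist elsewhere.
// Since the encoder writes PNG, the Base64 string can be embedded directly as a PNG data URI,
// e.g. for a WebView or an HTML payload.
string base64 = await GetBase64OfBitmap(sourceBitmap);
string dataUri = $"data:image/png;base64,{base64}";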
public static async Task<IBuffer> SaveAsPngIntoBufferAsync(this Canvas canvas, double _scaleFactor, int dpiForImage = 200)
{
    //string currentresolution = Window.Current.Bounds.Width * scaleFactor + "*" + Window.Current.Bounds.Height * scaleFactor;

    RenderTargetBitmap renderTargetBitmap = new RenderTargetBitmap();
    await renderTargetBitmap.RenderAsync(canvas);
    var pixels = await renderTargetBitmap.GetPixelsAsync();

    using (IRandomAccessStream stream = new InMemoryRandomAccessStream())
    {
        var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);
        byte[] bytes = pixels.ToArray();

        await CoreWindow.GetForCurrentThread().Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            encoder.SetPixelData(
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Ignore,
                (uint)(canvas.ActualWidth * _scaleFactor),
                (uint)(canvas.ActualHeight * _scaleFactor),
                dpiForImage,
                dpiForImage,
                bytes);
        });

        await encoder.FlushAsync();
        stream.Seek(0);

        var buffer = WindowsRuntimeBuffer.Create((int)stream.Size);
        await stream.ReadAsync(buffer, (uint)stream.Size, InputStreamOptions.None);
        return buffer;
    }
}
public async Task<byte[]> EncodedBytes(byte[] bitmapArray)
{
    if (bitmapArray == null)
    {
        return null;
    }

    SoftwareBitmap bitmapBgra8 = SoftwareBitmap.Convert(
        await bitmapArray.ToSoftwareBitmapAsync(),
        BitmapPixelFormat.Bgra8,
        BitmapAlphaMode.Premultiplied);

    byte[] array = null;

    // First: Use an encoder to copy from SoftwareBitmap to an in-mem stream (FlushAsync)
    // Next:  Use ReadAsync on the in-mem stream to get byte[] array
    using (var ms = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
        encoder.SetSoftwareBitmap(bitmapBgra8);

        try
        {
            await encoder.FlushAsync();
        }
        catch (Exception)
        {
            return new byte[0];
        }

        array = new byte[ms.Size];
        await ms.ReadAsync(array.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
    }

    return array;
}
// See https://stackoverflow.com/questions/31188479/converting-a-videoframe-to-a-byte-array
public static async Task<byte[]> EncodedBytes(SoftwareBitmap soft, Guid encoderId)
{
    byte[] array = null;

    // First: Use an encoder to copy from SoftwareBitmap to an in-mem stream (FlushAsync)
    // Next:  Use ReadAsync on the in-mem stream to get byte[] array
    using (var ms = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(encoderId, ms);
        encoder.SetSoftwareBitmap(soft);

        try
        {
            await encoder.FlushAsync();
        }
        catch (Exception e)
        {
            System.Diagnostics.Debug.WriteLine(e.Message);
            return new byte[0];
        }

        array = new byte[ms.Size];
        await ms.ReadAsync(array.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
    }

    return array;
}
public async Task<byte[]> RotateImageAsync(byte[] originalImage, SideOrientation orientation, ImageFormat imageFormat)
{
    byte[] resultImage = null;
    WriteableBitmap bitmapImage = await originalImage.ToBitmapImageAsync();
    MemoryStream memoryStream = new MemoryStream(originalImage);

    using (IRandomAccessStream randomAccessStream = memoryStream.AsRandomAccessStream())
    {
        BitmapDecoder decoder = await BitmapDecoder.CreateAsync(randomAccessStream);
        var resizedStream = new InMemoryRandomAccessStream();
        BitmapEncoder encoder = await BitmapEncoder.CreateForTranscodingAsync(resizedStream, decoder);

        encoder.BitmapTransform.InterpolationMode = BitmapInterpolationMode.Linear;
        encoder.BitmapTransform.Rotation = orientation == SideOrientation.RotateToRigth
            ? BitmapRotation.Clockwise90Degrees
            : BitmapRotation.Clockwise270Degrees;
        await encoder.FlushAsync();

        resizedStream.Seek(0);
        resultImage = new byte[resizedStream.Size];
        await resizedStream.ReadAsync(resultImage.AsBuffer(), (uint)resizedStream.Size, InputStreamOptions.None);
    }

    return resultImage;
}
public static async Task<Uri> ToQrDataUri(this ISdp sdp, int width, int height)
{
    var qrCodeWriter = new QRCodeWriter();
    var bitMatrix = qrCodeWriter.encode(sdp.ToString(), ZXing.BarcodeFormat.QR_CODE, width, height);

    using (var canvasRenderTarget = new CanvasRenderTarget(CanvasDevice.GetSharedDevice(), 500, 500, 96))
    {
        using (var drawingSession = canvasRenderTarget.CreateDrawingSession())
        {
            for (var y = 0; y < height; y++)
            {
                for (var x = 0; x < width; x++)
                {
                    drawingSession.DrawRectangle(
                        x, y, 1, 1,
                        bitMatrix.get(x, y)
                            ? Color.FromArgb(0, 0, 0, 0)
                            : Color.FromArgb(255, 255, 255, 255));
                }
            }
        }

        using (var inMemoryRandomAccessStream = new InMemoryRandomAccessStream())
        {
            await canvasRenderTarget.SaveAsync(inMemoryRandomAccessStream, CanvasBitmapFileFormat.Png);
            inMemoryRandomAccessStream.Seek(0);

            var buffer = new byte[inMemoryRandomAccessStream.Size];
            await inMemoryRandomAccessStream.ReadAsync(buffer.AsBuffer(), (uint)inMemoryRandomAccessStream.Size, InputStreamOptions.None);

            return new Uri($"data:image/png;base64,{Convert.ToBase64String(buffer)}");
        }
    }
}
private async Task<byte[]> SaveSoftwareBitmapToBufferAsync(SoftwareBitmap softwareBitmap)
{
    byte[] bytes = null;
    try
    {
        using (IRandomAccessStream stream = new InMemoryRandomAccessStream())
        {
            // Create an encoder with the desired format
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, stream);
            encoder.SetSoftwareBitmap(softwareBitmap);
            encoder.IsThumbnailGenerated = false;
            await encoder.FlushAsync();

            bytes = new byte[stream.Size];

            // This returns IAsyncOperationWithProgress, so you can add additional progress handling
            await stream.ReadAsync(bytes.AsBuffer(), (uint)stream.Size, Windows.Storage.Streams.InputStreamOptions.None);
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex);
    }
    return bytes;
}
public async Task<string> Classify(SoftwareBitmap image)
{
    // First: Use an encoder to copy from SoftwareBitmap to an in-mem stream (FlushAsync)
    // Next:  Use ReadAsync on the in-mem stream to get byte[] array

    // 1) Convert the SoftwareBitmap to an encoded byte array
    byte[] array = null;
    using (var ms = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
        encoder.SetSoftwareBitmap(image);

        try
        {
            await encoder.FlushAsync();
        }
        catch (Exception)
        {
            throw new NotImplementedException();
        }

        array = new byte[ms.Size];
        await ms.ReadAsync(array.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
    }

    string response = string.Empty;

    // Use multipart/form-data to send the frame to the classifier API
    var containerURL = @"http://127.0.0.1:4000/image";
    response = await MultiForm_GetJsonData(containerURL, array);

    // Parse the response and get the highest probability; if none, return null
    return response;
}
async void OnFrameSampleAcquired(VideoCaptureSample sample)
{
    if (frameProccessed == false)
    {
        cnt_out += 1;
        return;
    }
    cnt_in += 1;
    Debug.Log("cnt : in = " + cnt_in.ToString() + ", out = " + cnt_out);

    frameProccessed = false;
    Debug.Log("Frame sample acquired");

    bool mappable = true;
    float[] cameraToWorldMatrixAsFloat;
    float[] projectionMatrixAsFloat;
    mappable &= sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat);
    mappable &= sample.TryGetProjectionMatrix(out projectionMatrixAsFloat);

    // When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
    // You can reuse this byte[] until you need to resize it (for whatever reason).
    byte[] latestImageBytes = null;

    System.Diagnostics.Stopwatch st = new System.Diagnostics.Stopwatch();
    st.Start();
    using (var ms = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
        encoder.SetSoftwareBitmap(sample.Bitmap);

        try
        {
            await encoder.FlushAsync();
        }
        catch (Exception err)
        {
            Debug.LogError(err.Message);
            return;
        }

        latestImageBytes = new byte[ms.Size];
        await ms.ReadAsync(latestImageBytes.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
    }
    st.Stop();
    Debug.Log("encoding time " + st.ElapsedMilliseconds.ToString());

    // Right now we pass things across the pipe as a float array, then convert them back into a
    // UnityEngine.Matrix4x4 using a utility method.
    if (mappable)
    {
        st.Restart();
        cameraToWorld = CameraStreamHelper.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        projection = CameraStreamHelper.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);
        await SocketManager.Instance.SendPhoto(latestImageBytes);
        st.Stop();
        Debug.Log("network time " + st.ElapsedMilliseconds.ToString());

        BoundingBox[] boxes = await SocketManager.Instance.RecvDetections();
        SceneUnderstanding.Instance.RecvDetections(cameraToWorld, projection, boxes, mappable);
    }

    frameProccessed = true;
}
//private async void btnCapture_OnClick(object sender, RoutedEventArgs e)
//{
//    // Acknowledge that the user has triggered a button to capture a barcode.
//    this.lblMsg.Text = "-------";
//
//    // Capture the photo from the camera to a storage-file.
//    ImageEncodingProperties fmtImage = ImageEncodingProperties.CreateJpeg();
//    StorageLibrary libPhoto = await StorageLibrary.GetLibraryAsync(KnownLibraryId.Pictures);
//    StorageFile storefile =
//        await libPhoto.SaveFolder.CreateFileAsync("BarcodePhoto.jpg",
//            CreationCollisionOption.ReplaceExisting);
//    await this.captureMgr.CapturePhotoToStorageFileAsync(fmtImage, storefile);
//
//    // Tell the user that we have taken a picture.
//    this.lblMsg.Text = "Picture taken";
//}

/// <summary>
/// uses memory stream
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void btnCapture_OnClick(object sender, RoutedEventArgs e)
{
    // Acknowledge that the user has triggered a button to capture a barcode.
    this.lblMsg.Text = "-------";

    // Capture the photo from the camera to a memory stream.
    ImageEncodingProperties fmtImage = ImageEncodingProperties.CreateBmp();
    byte[] bytes;
    using (var captureStream = new InMemoryRandomAccessStream())
    {
        await this.captureMgr.CapturePhotoToStreamAsync(fmtImage, captureStream);
        captureStream.Seek(0);

        bytes = new byte[captureStream.Size];
        await captureStream.ReadAsync(bytes.AsBuffer(), (uint)captureStream.Size, InputStreamOptions.None);

        //var decoder = await BitmapDecoder.CreateAsync(captureStream);
        //var pixelData = await decoder.GetPixelDataAsync();
        //bytes = pixelData.DetachPixelData();
    }

    var response = await MakeOCRRequest(bytes);

    bool found = false;
    if (response["regions"] != null && ((JArray)response["regions"]).Count > 0)
    {
        var region = ((JArray)response["regions"])[0];
        if (region["lines"] != null && ((JArray)region["lines"]).Count > 0)
        {
            var line = ((JArray)region["lines"])[0];
            if (line["words"] != null && ((JArray)line["words"]).Count > 0)
            {
                var word = ((JArray)line["words"])[0];
                found = true;

                var data = new { found, word };
                this.device.SendDeviceToCloudMessagesAsync(JsonConvert.SerializeObject(data), found);

                // Tell the user that we have taken a picture.
                this.lblMsg.Text = Regex.Replace(JsonConvert.SerializeObject(word), @"\t|\n|\r|\s", "");
            }
        }
    }

    if (!found)
    {
        var data = new { found = false, response };
        this.device.SendDeviceToCloudMessagesAsync(JsonConvert.SerializeObject(data), false);
        this.lblMsg.Text = Regex.Replace(response.ToString(), @"\t|\n|\r|\s", "");
    }

    // Tell the user that we have taken a picture.
}
public static async Task<byte[]> ResizeImageWindows(MediaFile file, float width, float height)
{
    byte[] resizedData;

    Stream stream = file.GetStream();
    stream.Position = 0;

    byte[] buffer = new byte[stream.Length];
    for (int totalBytesCopied = 0; totalBytesCopied < stream.Length;)
    {
        totalBytesCopied += stream.Read(buffer, totalBytesCopied, Convert.ToInt32(stream.Length) - totalBytesCopied);
    }

    using (var streamIn = new MemoryStream(buffer))
    {
        using (var imageStream = streamIn.AsRandomAccessStream())
        {
            var decoder = await BitmapDecoder.CreateAsync(imageStream);
            var resizedStream = new InMemoryRandomAccessStream();
            var encoder = await BitmapEncoder.CreateForTranscodingAsync(resizedStream, decoder);

            encoder.BitmapTransform.InterpolationMode = BitmapInterpolationMode.Linear;
            encoder.BitmapTransform.ScaledHeight = (uint)height;
            encoder.BitmapTransform.ScaledWidth = (uint)width;
            await encoder.FlushAsync();

            resizedStream.Seek(0);
            resizedData = new byte[resizedStream.Size];
            await resizedStream.ReadAsync(resizedData.AsBuffer(), (uint)resizedStream.Size, InputStreamOptions.None);
        }
    }

    return resizedData;
}
public static async Task<byte[]> Convert(SoftwareBitmap image)
{
    byte[] byteData = null;

    if (image == null)
    {
        return new byte[0];
    }

    using (var ms = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
        encoder.SetSoftwareBitmap(image);

        try
        {
            await encoder.FlushAsync();
        }
        catch (Exception ex)
        {
            System.Diagnostics.Trace.WriteLine(ex.Message);
            return new byte[0];
        }

        byteData = new byte[ms.Size];
        await ms.ReadAsync(byteData.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
    }

    // Request body. Posts a locally stored JPEG image.
    return byteData;
}
public static async Task<byte[]> ResizeImageWindows(byte[] imageData, float width, float height)
{
    byte[] resizedData;

    using (var streamIn = new MemoryStream(imageData))
    {
        using (var imageStream = streamIn.AsRandomAccessStream())
        {
            var decoder = await BitmapDecoder.CreateAsync(imageStream);
            var resizedStream = new InMemoryRandomAccessStream();
            var encoder = await BitmapEncoder.CreateForTranscodingAsync(resizedStream, decoder);

            encoder.BitmapTransform.InterpolationMode = BitmapInterpolationMode.Linear;
            encoder.BitmapTransform.ScaledHeight = (uint)height;
            encoder.BitmapTransform.ScaledWidth = (uint)width;
            await encoder.FlushAsync();

            resizedStream.Seek(0);
            resizedData = new byte[resizedStream.Size];
            await resizedStream.ReadAsync(resizedData.AsBuffer(), (uint)resizedStream.Size, InputStreamOptions.None);
        }
    }

    return resizedData;
}
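// Illustrative usage of the byte[] overload above (hedged sketch): originalBytes is assumed to hold an
// encoded image (JPEG/PNG), and 800x600 is an arbitrary target. Note this helper scales to the exact
// width/height given, so the aspect ratio is not preserved unless the caller computes it.
byte[] thumbnail = await ResizeImageWindows(originalBytes, 800, 600);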
private async void AppBarButton_Click(object sender, RoutedEventArgs e)
{
    try
    {
        var vm = this.DataContext as CameraCapturePageViewModel;
        var imageEncodingProps = ImageEncodingProperties.CreatePng();

        using (var stream = new InMemoryRandomAccessStream())
        {
            await _mediaCapture.CapturePhotoToStreamAsync(imageEncodingProps, stream);

            // Rewind to the start of the captured photo before reading it back.
            stream.Seek(0);
            _bytes = new byte[stream.Size];
            var buffer = await stream.ReadAsync(_bytes.AsBuffer(), (uint)stream.Size, InputStreamOptions.None);
            _bytes = buffer.ToArray(0, (int)stream.Size);

            if (vm.ImageSource == null)
            {
                vm.ImageSource = new BitmapImage();
            }

            stream.Seek(0);
            await vm.ImageSource.SetSourceAsync(stream);

            Retake.Visibility = Windows.UI.Xaml.Visibility.Visible;
            Take.Visibility = Windows.UI.Xaml.Visibility.Collapsed;

            await _mediaCapture.StopPreviewAsync();
        }
    }
    catch (Exception)
    {
        throw;
    }
}
public static SKImage SoftwareBitmapToSKImage(SoftwareBitmap bitmap)
{
    using InMemoryRandomAccessStream randomAccessStream = new InMemoryRandomAccessStream();

    // This method is synchronous, so each WinRT async operation is polled until it completes.
    var createTask = BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, randomAccessStream);
    while (createTask.Status == AsyncStatus.Started)
    {
        Thread.Sleep(50);
    }
    BitmapEncoder encoder = createTask.GetResults();

    encoder.SetSoftwareBitmap(SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Rgba8));
    try
    {
        var flushTask = encoder.FlushAsync();
        while (flushTask.Status == AsyncStatus.Started)
        {
            Thread.Sleep(50);
        }
    }
    catch
    {
    }

    byte[] array = new byte[randomAccessStream.Size];
    var readTask = randomAccessStream.ReadAsync(array.AsBuffer(), (uint)randomAccessStream.Size, InputStreamOptions.None);
    while (readTask.Status == AsyncStatus.Started)
    {
        Thread.Sleep(50);
    }

    return SKImage.FromEncodedData(array);
}
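// Illustrative usage of the converter above (hedged sketch): assumes SkiaSharp is referenced,
// someSoftwareBitmap exists, and "snapshot.png" is a hypothetical, writable output path.
SKImage skImage = SoftwareBitmapToSKImage(someSoftwareBitmap);
using (SKData png = skImage.Encode(SKEncodedImageFormat.Png, 100))
using (var file = System.IO.File.OpenWrite("snapshot.png"))
{
    png.SaveTo(file);
}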
/// <summary>
/// Get bitmap from current frame of video using RenderTarget method.
/// Only used if video is not already downloaded.
/// </summary>
/// <returns>Byte array representing bitmap</returns>
public async Task<byte[]> GetBitmapFromRenderTarget()
{
    var tweet = new RenderTargetBitmap();
    await tweet.RenderAsync(HostElement);
    var pixels = await tweet.GetPixelsAsync();

    InMemoryRandomAccessStream randomAccessStream = new InMemoryRandomAccessStream();
    var be = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, randomAccessStream);
    be.SetPixelData(
        BitmapPixelFormat.Bgra8,
        BitmapAlphaMode.Ignore,
        (uint)tweet.PixelWidth,
        (uint)tweet.PixelHeight,
        92.0,
        92.0,
        pixels.ToArray());
    await be.FlushAsync();

    var bytes = new byte[randomAccessStream.Size];
    await randomAccessStream.ReadAsync(bytes.AsBuffer(), (uint)randomAccessStream.Size, InputStreamOptions.None);
    return bytes;
}
private async Task<byte[]> EncodedBytes(SoftwareBitmap soft, Guid encoderId)
{
    byte[] array = null;

    // First: Use an encoder to copy from SoftwareBitmap to an in-mem stream (FlushAsync)
    // Next:  Use ReadAsync on the in-mem stream to get byte[] array
    using (var ms = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(encoderId, ms);
        encoder.SetSoftwareBitmap(soft);

        try
        {
            await encoder.FlushAsync();
        }
        catch
        {
            return new byte[0];
        }

        array = new byte[ms.Size];
        await ms.ReadAsync(array.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
    }

    return array;
}
private async Task makeJpegAsync(BitmapDecoder decoder, byte[] pixelBytes, pdfPage page, BitmapImage bitmapImage, int x, int y)
{
    //double jpegImageQuality = Constants.ImageAttachStartingImageQuality;
    double jpegImageQuality = 0.9;
    ulong jpegImageSize = 0;

    var imageWriteableStream = new InMemoryRandomAccessStream();
    //MemoryStream memoryStream = new MemoryStream();
    //var imageWriteableStream = memoryStream.AsRandomAccessStream();

    using (imageWriteableStream)
    {
        var propertySet = new BitmapPropertySet();
        var qualityValue = new BitmapTypedValue(jpegImageQuality, Windows.Foundation.PropertyType.Single);
        propertySet.Add("ImageQuality", qualityValue);

        var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, imageWriteableStream, propertySet);

        // Key thing here is to use decoder.OrientedPixelWidth and decoder.OrientedPixelHeight;
        // otherwise you will get a garbled image on devices for some photos with orientation in the metadata.
        encoder.SetPixelData(
            decoder.BitmapPixelFormat,
            decoder.BitmapAlphaMode,
            decoder.OrientedPixelWidth,
            decoder.OrientedPixelHeight,
            decoder.DpiX,
            decoder.DpiY,
            pixelBytes);

        jpegImageSize = imageWriteableStream.Size;

        await encoder.FlushAsync();
        await imageWriteableStream.FlushAsync();

        var byteArray = new byte[imageWriteableStream.Size];
        await imageWriteableStream.ReadAsync(byteArray.AsBuffer(), (uint)imageWriteableStream.Size, InputStreamOptions.None);

        //page.addImage(bitmapImage, byteArray, x, y);
        page.addImage(imageWriteableStream);
    }
}
private async Task BeginRecording()
{
    while (true)
    {
        try
        {
            Debug.WriteLine($"Recording started");

            var memoryStream = new InMemoryRandomAccessStream();
            await _mediaCap.StartRecordToStreamAsync(MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Vga), memoryStream);
            await Task.Delay(TimeSpan.FromSeconds(5));
            await _mediaCap.StopRecordAsync();

            Debug.WriteLine($"Recording finished, {memoryStream.Size} bytes");

            memoryStream.Seek(0);
            CurrentVideo.Id = Guid.NewGuid();
            CurrentVideo.Data = new byte[memoryStream.Size];
            await memoryStream.ReadAsync(CurrentVideo.Data.AsBuffer(), (uint)memoryStream.Size, InputStreamOptions.None);

            Debug.WriteLine($"Bytes written to stream");

            _signal.Set();
            _signal.Reset();
        }
        catch (Exception ex)
        {
            Debug.WriteLine($"StartRecording -> {ex.Message}");
            break;
        }
    }
}
public async Task<byte[]> ResizeImageAsync(byte[] originalImage, int newHeight, int newWidth, ImageFormat imageFormat)
{
    byte[] resultImage = null;
    WriteableBitmap bitmapImage = await originalImage.ToBitmapImageAsync();
    MemoryStream memoryStream = new MemoryStream(originalImage);

    using (IRandomAccessStream randomAccessStream = memoryStream.AsRandomAccessStream())
    {
        BitmapDecoder decoder = await BitmapDecoder.CreateAsync(randomAccessStream);
        var resizedStream = new InMemoryRandomAccessStream();
        BitmapEncoder encoder = await BitmapEncoder.CreateForTranscodingAsync(resizedStream, decoder);

        encoder.BitmapTransform.InterpolationMode = BitmapInterpolationMode.Linear;
        encoder.BitmapTransform.ScaledHeight = (uint)newHeight;
        encoder.BitmapTransform.ScaledWidth = (uint)newWidth;
        await encoder.FlushAsync();

        resizedStream.Seek(0);
        resultImage = new byte[resizedStream.Size];
        await resizedStream.ReadAsync(resultImage.AsBuffer(), (uint)resizedStream.Size, InputStreamOptions.None);
    }

    return resultImage;
}
public async void StartVideoRecordingOnThread(StreamSocket _socket)
{
    // Make sure the MediaCapture object is initialized
    await CheckSetUp();

    Streamer streamer = new Streamer(_socket);

    // When the streamer is connected, create a new output stream using the streamer
    isRecording = true;
    while (true)
    {
        InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
        await _mediaCapture.StartRecordToStreamAsync(MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Vga), stream);
        await Task.Delay(TimeSpan.FromSeconds(1));
        await _mediaCapture.StopRecordAsync();

        stream.Seek(0);
        Windows.Storage.Streams.Buffer buffer = new Windows.Storage.Streams.Buffer((uint)stream.Size);
        await stream.ReadAsync(buffer, (uint)stream.Size, Windows.Storage.Streams.InputStreamOptions.None);

        streamer.WriteToSocketUsingReader(buffer);
    }
}
public static async Task<byte[]> GetBytesFromSoftwareBitmap(this SoftwareBitmap softwareBitmap, Guid encoderId)
{
    byte[] array = null;

    // First: Use an encoder to copy from SoftwareBitmap to an in-mem stream (FlushAsync)
    // Next:  Use ReadAsync on the in-mem stream to get byte[] array
    using (InMemoryRandomAccessStream imras = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(encoderId, imras);
        encoder.SetSoftwareBitmap(softwareBitmap);

        try
        {
            await encoder.FlushAsync();
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex);
            return new byte[0];
        }

        array = new byte[imras.Size];
        await imras.ReadAsync(array.AsBuffer(), (uint)imras.Size, InputStreamOptions.None);
    }

    return array;
}
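// Illustrative usage of the extension method above (hedged sketch): someSoftwareBitmap is assumed to
// exist, PNG is an arbitrary choice of encoder, and "frame.png" is a hypothetical file name in local app data.
byte[] pngBytes = await someSoftwareBitmap.GetBytesFromSoftwareBitmap(BitmapEncoder.PngEncoderId);
StorageFile file = await ApplicationData.Current.LocalFolder.CreateFileAsync("frame.png", CreationCollisionOption.ReplaceExisting);
await FileIO.WriteBytesAsync(file, pngBytes);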
protected async Task<byte[]> ConvertImageBufferToJpegBytes(IBuffer imageBuffer)
{
    using (var stream = imageBuffer.AsStream().AsRandomAccessStream())
    {
        var decoder = await BitmapDecoder.CreateAsync(stream);
        var pixels = await decoder.GetPixelDataAsync();

        using (var output = new InMemoryRandomAccessStream())
        {
            var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, output);

            await CoreWindow.GetForCurrentThread().Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                encoder.SetPixelData(
                    decoder.BitmapPixelFormat,
                    BitmapAlphaMode.Ignore,
                    decoder.OrientedPixelWidth,
                    decoder.OrientedPixelHeight,
                    decoder.DpiX,
                    decoder.DpiY,
                    pixels.DetachPixelData());
            });

            await encoder.FlushAsync();

            var buffer = WindowsRuntimeBuffer.Create((int)output.Size);
            output.Seek(0);
            await output.ReadAsync(buffer, (uint)output.Size, InputStreamOptions.None);
            return buffer.ToArray();
        }
    }
}
public async Task<byte[]> EncodeBitmap(object bitmap)
{
    byte[] array = null;

    var softwareBitmap = bitmap as SoftwareBitmap;
    if (softwareBitmap == null)
    {
        return array = new byte[0];
    }

    using (var ms = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
        encoder.SetSoftwareBitmap(softwareBitmap);

        try
        {
            await encoder.FlushAsync();
        }
        catch
        {
            return new byte[0];
        }

        array = new byte[ms.Size];
        await ms.ReadAsync(array.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
    }

    return array;
}
internal static async Task<byte[]> ImportPDF(string filename)
{
    var f = await StorageFile.GetFileFromPathAsync(filename);
    using (var stream = await f.OpenReadAsync())
    {
        var d = await PdfDocument.LoadFromStreamAsync(stream);
        using (var page0 = d.GetPage(0))
        {
            byte[] content;
            await page0.PreparePageAsync();

            using (var randomAccessStream = new InMemoryRandomAccessStream())
            {
                await page0.RenderToStreamAsync(randomAccessStream, new PdfPageRenderOptions
                {
                    BitmapEncoderId = BitmapEncoder.PngEncoderId
                });

                content = new byte[randomAccessStream.Size];
                await randomAccessStream.ReadAsync(content.AsBuffer(), (uint)randomAccessStream.Size, InputStreamOptions.None);
                return content;
            }
        }
    }
}
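// Illustrative usage of ImportPDF above (hedged sketch): the path is hypothetical and must be readable
// by the app (e.g. a file the user picked); the returned bytes are the PNG rendering of page 0.
byte[] firstPagePng = await ImportPDF(@"C:\Temp\sample.pdf");
StorageFile pageFile = await ApplicationData.Current.LocalFolder.CreateFileAsync("page0.png", CreationCollisionOption.ReplaceExisting);
await FileIO.WriteBytesAsync(pageFile, firstPagePng);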
/// <summary>
/// Sends the captured picture to the Azure backend.
/// </summary>
private async Task SendPhotoAsync()
{
    nowTime = DateTime.Now;
    if (nowTime.Subtract(preTime).TotalSeconds < 10)
    {
        return;
    }
    if (_isSendingPhoto)
    {
        return;
    }
    _isSendingPhoto = true;
    preTime = DateTime.Now;

    InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
    try
    {
        Debug.WriteLine("Taking photo...");
        await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), stream);
        Debug.WriteLine("Photo taken!");

        byte[] bytes = new byte[stream.Size];
        stream.Seek(0);
        var buffer = await stream.ReadAsync(bytes.AsBuffer(), (uint)stream.Size, InputStreamOptions.None);
        bytes = buffer.ToArray();
        Debug.WriteLine("bytes ok!");

        // Upload the picture using an HTTP request
        HttpResponseMessage response = await httpPostRequest(bytes);
        string playContent = string.Empty;
        if (response.IsSuccessStatusCode)
        {
            string resultContent = response.Content.ReadAsStringAsync().Result;
            Stream mStream = new MemoryStream(Encoding.UTF8.GetBytes(resultContent));
            DataContractJsonSerializer ser = new DataContractJsonSerializer(typeof(VisitorResultObject));
            VisitorResultObject result = (VisitorResultObject)ser.ReadObject(mStream);
            playContent = handleVisitorResult(result);
        }
        else
        {
            playContent = "Request Error";
        }

        await this.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            this.infoshowText.Text = playContent;
            PlayTTS(playContent);
        });
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Exception when taking a photo: {0}", ex.ToString());
    }
    _isSendingPhoto = false;
}
public static async Task<(byte[] decodedBytes, uint newWidth, uint newHeight)> ResizeImageAsync(this BitmapDecoder decoder, uint maximumWidth, uint maximumHeight)
{
    (byte[] decodedBytes, uint newWidth, uint newHeight) returnValue = (null, 0, 0);

    if (decoder.PixelHeight > maximumHeight || decoder.PixelWidth > maximumWidth)
    {
        using (InMemoryRandomAccessStream resizedStream = new InMemoryRandomAccessStream())
        {
            BitmapEncoder encoder = await BitmapEncoder.CreateForTranscodingAsync(resizedStream, decoder);

            double widthRatio = (double)maximumWidth / decoder.PixelWidth;
            double heightRatio = (double)maximumHeight / decoder.PixelHeight;
            double scaleRatio = Math.Min(widthRatio, heightRatio);

            if (maximumWidth == 0)
            {
                scaleRatio = heightRatio;
            }
            if (maximumHeight == 0)
            {
                scaleRatio = widthRatio;
            }

            returnValue.newHeight = (uint)Math.Floor(decoder.PixelHeight * scaleRatio);
            returnValue.newWidth = (uint)Math.Floor(decoder.PixelWidth * scaleRatio);

            encoder.BitmapTransform.InterpolationMode = BitmapInterpolationMode.Linear;
            encoder.BitmapTransform.ScaledHeight = returnValue.newHeight;
            encoder.BitmapTransform.ScaledWidth = returnValue.newWidth;
            await encoder.FlushAsync();

            resizedStream.Seek(0);
            byte[] resizedEncodedBytes = new byte[resizedStream.Size];
            await resizedStream.ReadAsync(resizedEncodedBytes.AsBuffer(), (uint)resizedStream.Size, InputStreamOptions.None);

            using (MemoryStream memoryStream = new MemoryStream(resizedEncodedBytes))
            {
                using (IRandomAccessStream imageStream = memoryStream.AsRandomAccessStream())
                {
                    BitmapDecoder decoder2 = await BitmapDecoder.CreateAsync(imageStream);
                    PixelDataProvider data = await decoder2.GetPixelDataAsync();
                    returnValue.decodedBytes = data.DetachPixelData();
                }
            }
        }
    }

    return returnValue;
}
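// Illustrative usage of the extension method above (hedged sketch): someStorageFile and the
// 1024x1024 bound are assumptions made for the example.
using (IRandomAccessStream source = await someStorageFile.OpenReadAsync())
{
    BitmapDecoder decoder = await BitmapDecoder.CreateAsync(source);
    var (pixels, width, height) = await decoder.ResizeImageAsync(1024, 1024);
    // pixels is null when the image was already within the requested bounds.
}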
public void PrintMicrophoneSample()
{
    MediaCapture capture;
    IRandomAccessStream stream;
    const int BufferSize = 64000;
    bool recording;
    float volume = 100;

    capture = new MediaCapture();
    stream = new InMemoryRandomAccessStream();

    var captureInitSettings2 = new MediaCaptureInitializationSettings();
    captureInitSettings2.StreamingCaptureMode = StreamingCaptureMode.Audio;
    capture.InitializeAsync(captureInitSettings2).AsTask().Wait();
    capture.AudioDeviceController.VolumePercent = volume;

    MediaEncodingProfile profile = new MediaEncodingProfile();
    AudioEncodingProperties audioProperties = AudioEncodingProperties.CreatePcm(16000, 1, 16);
    profile.Audio = audioProperties;
    profile.Video = null;
    profile.Container = new ContainerEncodingProperties() { Subtype = MediaEncodingSubtypes.Wave };

    // GetResults() throws if the async operation has not finished yet, so block on the task instead,
    // matching the InitializeAsync call above.
    capture.StartRecordToStreamAsync(profile, stream).AsTask().Wait();
    recording = true;

    // waste time
    for (int i = 0; i < 5; i++)
    {
        i = i * 232323 + 89; // WriteLine(i);
    }

    capture.StopRecordAsync().AsTask().Wait();

    byte[] wav = new byte[stream.Size];
    stream.Seek(0);
    stream.ReadAsync(wav.AsBuffer(), (uint)stream.Size, InputStreamOptions.None).AsTask().Wait();

    int sum = 0;
    for (int i = 0; i < wav.Count(); i++)
    {
        sum += (int)wav[i];
    }
    WriteLine((double)wav.Count() / sum);
}