/// <summary>
/// Camera preview callback: converts the raw NV21 frame to a JPEG and
/// forwards the encoded bytes to the Forms element.
/// </summary>
/// <param name="data">Raw preview frame in NV21 layout (assumed — format is hard-coded below).</param>
/// <param name="camera">The camera that produced the frame (unused here).</param>
public void OnPreviewFrame(byte[] data, Android.Hardware.Camera camera)
{
    // Dispose the stream deterministically; the original leaked it each frame.
    using (var outputStream = new MemoryStream())
    {
        var yuvImage = new YuvImage(data, ImageFormat.Nv21, _imageWidth, _imageHeight, null);
        // Quality 50 keeps per-frame JPEGs small for a live preview pipeline.
        yuvImage.CompressToJpeg(new Rect(0, 0, _imageWidth, _imageHeight), 50, outputStream);
        byte[] imageBytes = outputStream.ToArray();
        Element.OnPreviewReady(imageBytes);
    }
}
/// <summary>
/// Copies a native NV21 frame buffer, compresses it to JPEG, and stores it in
/// the shared frame list keyed by timestamp. Best-effort: a frame that fails
/// conversion is dropped, not rethrown.
/// </summary>
/// <param name="allframes">Shared, lock-protected list of captured frames.</param>
/// <param name="bytebuff">Java ByteBuffer holding the raw NV21 bytes.</param>
/// <param name="timestamp">Frame timestamp, used as the sort key.</param>
/// <param name="detected">Detector results associated with this frame.</param>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <param name="quality">JPEG quality (0-100).</param>
public static void AddConvertByteBuffer(ref SortedList<float, FrameData> allframes, ByteBuffer bytebuff, long timestamp, SparseArray detected, int width, int height, int quality)
{
    try
    {
        // Copy the remaining native bytes into a managed array.
        var raw = new byte[bytebuff.Remaining()];
        bytebuff.Get(raw);

        byte[] jpegArray;
        using (var jpegStream = new MemoryStream())
        {
            var yuv = new YuvImage(raw, ImageFormatType.Nv21, width, height, null);
            yuv.CompressToJpeg(new Rect(0, 0, width, height), quality, jpegStream);
            jpegArray = jpegStream.ToArray();
        }

        lock (obj)
        {
            // SortedList.Add throws ArgumentException on a duplicate key; skip
            // frames with a timestamp we already stored instead of throwing.
            if (!allframes.ContainsKey(timestamp))
            {
                allframes.Add(timestamp, new FrameData(timestamp, jpegArray, detected));
            }
        }
    }
    catch (Exception e)
    {
        // Dropping a frame is acceptable, but record why instead of the
        // original silent `var x = e;` swallow.
        Android.Util.Log.Warn("AddConvertByteBuffer", e.ToString());
    }
}
/// <summary>
/// Converts a raw preview frame (in the camera's configured preview format)
/// to a JPEG byte array at quality 50.
/// </summary>
/// <param name="data">Raw preview frame bytes.</param>
/// <param name="camera">Camera used to query preview size and pixel format.</param>
/// <returns>JPEG-encoded image bytes.</returns>
private byte[] ConvertYugByetArrayJpegByteArray(byte[] data, Android.Hardware.Camera camera)
{
    var parameters = camera.GetParameters();
    int width = parameters.PreviewSize.Width;
    int height = parameters.PreviewSize.Height;
    var yuv = new YuvImage(data, parameters.PreviewFormat, width, height, null);

    // Dispose the stream deterministically; the original leaked it per call.
    using (var outputStream = new MemoryStream())
    {
        yuv.CompressToJpeg(new Rect(0, 0, width, height), 50, outputStream);
        return outputStream.ToArray();
    }
}
/// <summary>
/// Converts a raw YUV preview frame to a JPEG byte array using the camera's
/// current preview size and pixel format.
/// </summary>
/// <param name="yuvData">Raw preview frame bytes.</param>
/// <param name="camera">Camera used to query preview size and pixel format.</param>
/// <returns>JPEG-encoded image bytes.</returns>
private byte[] ConvertYuvToJpeg(byte[] yuvData, Android.Hardware.Camera camera)
{
    var cameraParameters = camera.GetParameters();
    var width = cameraParameters.PreviewSize.Width;
    var height = cameraParameters.PreviewSize.Height;
    var yuv = new YuvImage(yuvData, cameraParameters.PreviewFormat, width, height, null);

    const int quality = 80; // adjust this as needed

    // Dispose the stream deterministically; the original leaked it per call.
    using (var ms = new MemoryStream())
    {
        yuv.CompressToJpeg(new Rect(0, 0, width, height), quality, ms);
        return ms.ToArray();
    }
}
/// <summary>
/// Handles a ready preview frame: optionally captures a still JPEG (when
/// <c>TakePicture</c> is set) and then runs barcode decoding on a background task.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="fastArray">Native byte array holding the NV21 preview frame.</param>
private void HandleOnPreviewFrameReady(object sender, FastJavaByteArray fastArray)
{
    if (!CanAnalyzeFrame)
    {
        return;
    }

    _wasScanned = false;
    _lastPreviewAnalysis = DateTime.UtcNow;

    _processingTask = Task.Run(() =>
    {
        try
        {
            if (TakePicture)
            {
                // Reset the flag first so only one still is captured per request.
                TakePicture = false;

                var raw = new byte[fastArray.Count];
                fastArray.CopyTo(raw, 0);

                // Query the camera once; the original called GetParameters() twice.
                var previewSize = _cameraController.Camera.GetParameters().PreviewSize;
                var img = new YuvImage(raw, ImageFormatType.Nv21, previewSize.Width, previewSize.Height, null);

                using (var stream = new MemoryStream())
                {
                    var rect = new Rect(0, 0, img.Width, img.Height);
                    img.CompressToJpeg(rect, 80, stream);
                    // The redundant stream.Close() was removed; `using` disposes it.
                    OnPictureTaken?.Invoke(this, stream.ToArray());
                }
            }

            DecodeFrame(fastArray);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex);
        }
    }).ContinueWith(task =>
    {
        if (task.IsFaulted)
        {
            Android.Util.Log.Debug(MobileBarcodeScanner.TAG, "DecodeFrame exception occurs");
        }
    }, TaskContinuationOptions.OnlyOnFaulted);
}
/// <summary>
/// Converts a raw YUV preview frame to a decoded <see cref="Bitmap"/> by
/// compressing it to JPEG at the configured quality and decoding the result.
/// </summary>
/// <param name="data">Raw preview frame bytes.</param>
/// <param name="camera">Camera used to query preview size and pixel format.</param>
/// <returns>The decoded bitmap, or <c>null</c> if conversion fails.</returns>
public static Bitmap convertYuvByteArrayToBitmap(byte[] data, Android.Hardware.Camera camera)
{
    try
    {
        Android.Hardware.Camera.Parameters parameters = camera.GetParameters();
        Android.Hardware.Camera.Size size = parameters.PreviewSize;
        YuvImage image = new YuvImage(data, parameters.PreviewFormat, size.Width, size.Height, null);

        // `using` replaces the manual Flush/Close/Dispose sequence and also
        // disposes on the exception path (the original leaked on failure).
        using (var outputStream = new System.IO.MemoryStream())
        {
            image.CompressToJpeg(new Rect(0, 0, size.Width, size.Height), int.Parse(MainValues.quality), outputStream);
            byte[] imageBytes = outputStream.ToArray();
            return BitmapFactory.DecodeByteArray(imageBytes, 0, imageBytes.Length);
        }
    }
    catch (Exception)
    {
        // Best-effort: callers treat null as "no frame available".
        return null;
    }
}
/// <summary>
/// Converts a raw YUV preview frame to a decoded <see cref="Bitmap"/> via an
/// intermediate JPEG at the configured quality.
/// </summary>
/// <param name="data">Raw preview frame bytes.</param>
/// <param name="camera">Camera used to query preview size and pixel format.</param>
/// <returns>The decoded bitmap, or <c>null</c> if conversion fails.</returns>
private Bitmap convertYuvByteArrayToBitmap(byte[] data, Android.Hardware.Camera camera)
{
    try
    {
        Android.Hardware.Camera.Parameters parameters = camera.GetParameters();
        Android.Hardware.Camera.Size size = parameters.PreviewSize;
        using (YuvImage image = new YuvImage(data, parameters.PreviewFormat, size.Width, size.Height, null))
        using (MemoryStream memoryStream = new MemoryStream())
        {
            image.CompressToJpeg(new Rect(0, 0, size.Width, size.Height), int.Parse(MainValues.quality), memoryStream);
            // FIX: the original called memoryStream.ToArray() twice, creating
            // two full copies of the JPEG per frame. Copy once and reuse.
            byte[] jpegBytes = memoryStream.ToArray();
            return BitmapFactory.DecodeByteArray(jpegBytes, 0, jpegBytes.Length);
        }
    }
    catch (Exception)
    {
        // Best-effort: callers treat null as "no frame available".
        return null;
    }
}
/// <summary>
/// Extension method: encodes an NV21 frame held in a <see cref="FastJavaByteArray"/>
/// as a JPEG at quality 100 and returns the encoded bytes.
/// </summary>
/// <param name="self">Native byte array containing the NV21 frame.</param>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <returns>JPEG-encoded image bytes.</returns>
public static byte[] ConvertToJpg(this FastJavaByteArray self, int width, int height)
{
    // Copy the Java-side bytes into a managed buffer first.
    var nv21 = new byte[self.Count];
    self.CopyTo(nv21, 0);

    // Stacked usings dispose image, rect and stream in reverse order.
    using (var yuvImage = new YuvImage(nv21, ImageFormatType.Nv21, width, height, null))
    using (var bounds = new Rect(0, 0, width, height))
    using (var buffer = new MemoryStream())
    {
        yuvImage.CompressToJpeg(bounds, 100, buffer);
        return buffer.ToArray();
    }
}
/// <summary>
/// Camera preview callback: converts the NV21 frame to JPEG and enqueues it
/// for streaming. Ends the session when <c>isRunning</c> has been cleared.
/// </summary>
/// <param name="data">Raw preview frame bytes, assumed NV21.</param>
/// <param name="camera">The camera that produced the frame (unused here).</param>
public void OnPreviewFrame(byte[] data, Android.Hardware.Camera camera)
{
    if (!isRunning)
    {
        // NOTE(review): the original falls through and still encodes/enqueues
        // the frame after EndSession — preserved here, but confirm whether a
        // `return` was intended once the session has ended.
        EndSession("Remote session ended");
    }

    try
    {
        // Convert YuvImage(NV21) to JPEG image data.
        using (var memStream = new MemoryStream())
        {
            var yuvimage = new YuvImage(data, Android.Graphics.ImageFormatType.Nv21, deviceWidth, deviceHeight, null);
            yuvimage.CompressToJpeg(new Rect(0, 0, deviceWidth, deviceHeight), 50, memStream);
            // The original leaked the MemoryStream every frame; `using` fixes that.
            frameQueue.Enqueue(memStream.ToArray());
        }
    }
    catch (Exception e)
    {
        Log.Debug(TAG, "Error getting preview frame: " + e.Message);
    }
}
/// <summary>
/// Decodes a <see cref="YuvImage"/> into a <see cref="Bitmap"/> by compressing
/// it to JPEG at quality 100 and decoding the bytes.
/// </summary>
/// <param name="yuvimage">Source YUV image.</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <returns>The decoded bitmap.</returns>
/// <exception cref="RuntimeException">Thrown when conversion or decoding fails.</exception>
public static Bitmap GetBitmap(YuvImage yuvimage, int width, int height)
{
    try
    {
        using (var jpegStream = new MemoryStream())
        {
            // Quality 100 preserves maximum detail in the intermediate JPEG.
            yuvimage.CompressToJpeg(new Rect(0, 0, width, height), 100, jpegStream);
            var jpegBytes = jpegStream.ToArray();
            return BitmapFactory.DecodeByteArray(jpegBytes, 0, jpegBytes.Length);
        }
    }
    catch (Exception e)
    {
        Log.Error("Utils", "could not get bitmap", e, e.Message);
        throw new RuntimeException("could not get bitmap");
    }
}
/// <summary>
/// Camera preview callback: compresses the frame to a low-quality JPEG,
/// packages it into UDP fragments, and raises <c>FrameRefreshed</c>.
/// Any failure restarts the capture loop via <c>start()</c>.
/// </summary>
/// <param name="data">Raw preview frame bytes in the camera's preview format.</param>
/// <param name="camera">Camera used to query preview size and pixel format.</param>
public void OnPreviewFrame(byte[] data, Android.Hardware.Camera camera)
{
    try
    {
        noFrames = 0;

        // Query the camera once; the original called GetParameters() three times per frame.
        var parameters = camera.GetParameters();
        var yi = new YuvImage(data, parameters.PreviewFormat, parameters.PreviewSize.Width, parameters.PreviewSize.Height, null);

        using MemoryStream ms = new MemoryStream();
        // Quality 20: heavily compressed, suitable for UDP streaming.
        yi.CompressToJpeg(new Rect(0, 0, yi.Width, yi.Height), 20, ms);
        byte[] jpegBytes = ms.ToArray();

        // The original also declared an unused `byte[][] frames` array; removed.
        UdpFrame frame = new UdpFrame(jpegBytes);
        FrameRefreshed?.Invoke(frame.packages);
    }
    catch (Exception)
    {
        // Best-effort recovery: restart the preview pipeline on any error.
        start();
    }
}
/// <summary>
/// Camera preview callback: for YUV preview formats, converts the frame to a
/// scaled (320x240), rotated JPEG and enqueues it for network streaming; for
/// JPEG preview frames, enqueues the raw bytes directly. Also mirrors frames
/// into the server's ImagesSource queue when clients are connected.
/// </summary>
/// <param name="data">Raw preview frame bytes in the camera's preview format.</param>
/// <param name="camera">Camera used to query preview size and pixel format.</param>
public void OnPreviewFrame(byte[] data, Android.Hardware.Camera camera)
{
    try
    {
        var paras = camera.GetParameters();
        var imageformat = paras.PreviewFormat;
        // NOTE(review): GetParameters() is called before these null guards, so
        // a null camera would already have thrown above; the try/catch absorbs it.
        if (data == null) { return; }
        if (camera == null || camera.GetParameters() == null) { return; }
        if (renderer == null) { return; }
        switch (imageformat)
        {
            // All YUV-family preview formats share one conversion path.
            case ImageFormatType.Nv16:
            case ImageFormatType.Nv21:
            case ImageFormatType.Yuy2:
            case ImageFormatType.Yv12:
            {
                {
                    // Encode the raw frame to JPEG at the renderer's quality setting.
                    YuvImage img = new YuvImage(data, imageformat, paras.PreviewSize.Width, paras.PreviewSize.Height, null);
                    System.IO.MemoryStream outStream = new System.IO.MemoryStream();
                    if (img.CompressToJpeg(new Rect(0, 0, paras.PreviewSize.Width, paras.PreviewSize.Height), renderer.quality, outStream) == false) { return; }
                    var frameToStream = outStream.ToArray();
                    var bitmap = BitmapFactory.DecodeByteArray(frameToStream, 0, frameToStream.Length);
                    if (bitmap == null) { return; }
                    // Downscale to a fixed 320x240 streaming resolution.
                    var sbitmap = Bitmap.CreateScaledBitmap(bitmap, 320, 240, true);
                    var mat = new Matrix();
                    // Front camera frames rotate the opposite way from the back camera.
                    if (renderer.currentFacing == Android.Hardware.CameraFacing.Front) { mat.PostRotate(-90); } else { mat.PostRotate(90); }
                    var rbitmap = Bitmap.CreateBitmap(sbitmap, 0, 0, sbitmap.Width, sbitmap.Height, mat, true);
                    if (rbitmap == null) { return; }
                    // Re-encode the scaled+rotated bitmap for the outgoing queue.
                    var soutStream = new System.IO.MemoryStream();
                    if (rbitmap.Compress(Bitmap.CompressFormat.Jpeg, renderer.quality, soutStream) == false) { return; }
                    Frames.Enqueue(soutStream);
                    // Shows the pre-scale JPEG size in the UI (diagnostic readout).
                    renderer.textViewMain.Text = outStream.Length.ToString();
                    // Server side is temporarily commented out (translated from Korean).
                    // Cap the server queue at 100 images to bound memory use.
                    if (renderer.server.ImagesSource.Count > 100) { renderer.server.ImagesSource.Clear(); }
                    // NOTE(review): the server receives the full-size outStream, while
                    // the Frames queue holds the scaled soutStream — confirm intended.
                    if (renderer.server._Clients.Count > 0) { renderer.server.ImagesSource.Enqueue(outStream); }
                    if (Frames.Count > 0)
                    {
                        // Send queued frames on a background task so the preview
                        // callback returns quickly.
                        Task.Run(() =>
                        {
                            total_bytes_sent += outStream.Length;
                            if (NetProcess.TargetPlayerId.Count > 0) { NetProcess.SendRoomBITMAPMessage(Frames, 0); }
                            Frames.Clear();
                        });
                    }
                }
            }
            break;
            case ImageFormatType.Jpeg:
                // Frame is already JPEG: enqueue the raw bytes without re-encoding.
                Frames.Enqueue(new System.IO.MemoryStream(data));
                if (checktime < DateTime.Now)
                {
                    if (renderer.server._Clients.Count > 0)
                    {
                        foreach (var frame in Frames) { renderer.server.ImagesSource.Enqueue(frame); }
                    }
                    Frames.Clear();
                    // AddMilliseconds(0) makes the throttle window effectively zero;
                    // presumably a tunable flush interval — TODO confirm.
                    checktime = DateTime.Now.AddMilliseconds(0);
                }
                break;
        }
    }
    catch (System.Exception ex)
    {
        // Swallow per-frame errors so one bad frame does not kill the preview.
        System.Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Camera preview callback: every 4th frame is rotated, JPEG-encoded, scaled,
/// posted (base64) to a remote matcher, and the match result is rendered onto
/// the overlay view. Other frames are skipped to limit work per second.
/// </summary>
/// <param name="data">Raw preview frame bytes (rotated as NV21 below).</param>
/// <param name="camera">Camera used to query the preview size.</param>
public void OnPreviewFrame(byte[] data, Android.Hardware.Camera camera)
{
    // throw new NotImplementedException();
    // process data from camera(preiew-image on surfaceView) per frame
    // Process only every 4th frame to throttle the expensive POST round-trip.
    frame_count++;
    if (frame_count == 4)
    {
        frame_count = 0;
        try
        {
            int previewWidth = camera.GetParameters().PreviewSize.Width;
            int previewHeight = camera.GetParameters().PreviewSize.Height;
            byte[] rotate_data = YUVRotate(data, previewWidth, previewHeight);
            //after rotate height became width, and width became height
            YuvImage image = new YuvImage(rotate_data, ImageFormat.Nv21, previewHeight, previewWidth, null);
            if (image != null)
            {
                // Encode the rotated frame to JPEG, decode to a Bitmap, then
                // resize to the model's expected test dimensions.
                System.IO.MemoryStream stream = new System.IO.MemoryStream();
                image.CompressToJpeg(new Rect(0, 0, previewHeight, previewWidth), 100, stream);
                byte[] image_data = stream.ToArray();
                Bitmap bitmap_temp = BitmapFactory.DecodeByteArray(image_data, 0, image_data.Length);
                Bitmap bitmap_result = ZoomImage(bitmap_temp, image_test_width, image_test_height);
                System.IO.MemoryStream stream_ = new System.IO.MemoryStream();
                bitmap_result.Compress(Bitmap.CompressFormat.Jpeg, 100, stream_);
                byte[] image_data_ = stream_.ToArray();
                // Null out intermediates and force a collection to release bitmap
                // memory promptly between frames (deliberate, if unidiomatic).
                image = null;
                image_data = null;
                bitmap_temp = null;
                bitmap_result = null;
                stream = null;
                stream_ = null;
                GC.Collect();
                // Blocking POST of the base64 JPEG; response is a comma-separated
                // list: status, confidence, then 8 line coordinates on match.
                string result = PostFunc.MyPost.PostFunc(Convert.ToBase64String(image_data_));
                string[] result_split = result.Split(new char[] { ',' });
                _textview.Text = result_split[0] + " " + result_split[1] + "%";
                if (result_split[0] == "Match!")
                {
                    draw_flag = true;
                    // Scale the 8 returned coordinates from model space to the
                    // on-screen image view (even indices = x, odd = y).
                    for (int i = 0; i < 8; i++)
                    {
                        lines[i] = int.Parse(result_split[i + 2]);
                        if (i % 2 == 0)
                        {
                            lines[i] = lines[i] * imageview.Width / image_test_width;
                        }
                        else
                        {
                            lines[i] = lines[i] * imageview.Height / image_test_height;
                        }
                    }
                    imageview.OnDraw(lines, draw_flag);
                }
                else
                {
                    // No match: redraw with the flag cleared to hide the overlay.
                    draw_flag = false;
                    imageview.OnDraw(lines, draw_flag);
                }
            }
        }
        catch (IOException)
        {
            // Network/stream failures for this frame are ignored; the next
            // sampled frame will retry.
        }
    }
}
/// <summary>
/// Converts a detector <see cref="Frame"/>'s grayscale/NV21 data into a
/// rotation-corrected <see cref="Bitmap"/> and crops it to the given face.
/// </summary>
/// <param name="frame">Detector frame providing image bytes, size and rotation metadata.</param>
/// <param name="face">Face whose region is cropped from the processed image.</param>
/// <returns>The cropped, rotation-corrected bitmap.</returns>
private Bitmap GetProcessedImage(Frame frame, Face face)
{
    // Map the frame's rotation metadata to degrees for the transform matrix.
    int rotationAngle = 0;
    int width = frame.GetMetadata().Width;
    int height = frame.GetMetadata().Height;
    switch (frame.GetMetadata().Rotation)
    {
        case FrameRotation.Rotate0:
            break;
        case FrameRotation.Rotate90:
            rotationAngle = 90;
            break;
        case FrameRotation.Rotate180:
            rotationAngle = 180;
            break;
        case FrameRotation.Rotate270:
            rotationAngle = 270;
            break;
    }

    // Extract the backing byte[] of the Java ByteBuffer via JNI; the managed
    // binding does not expose ByteBuffer.array() directly.
    var buffer = frame.GrayscaleImageData;
    buffer.Rewind();
    IntPtr classHandle = JNIEnv.FindClass("java/nio/ByteBuffer");
    IntPtr methodId = JNIEnv.GetMethodID(classHandle, "array", "()[B");
    IntPtr resultHandle = JNIEnv.CallObjectMethod(buffer.Handle, methodId);
    byte[] bytes = JNIEnv.GetArray<byte>(resultHandle);
    JNIEnv.DeleteLocalRef(resultHandle);

    YuvImage yuvImage = new YuvImage(bytes, ImageFormatType.Nv21, width, height, null);
    using (var byteArrayOutputStream = new MemoryStream())
    {
        yuvImage.CompressToJpeg(new Rect(0, 0, width, height), 100, byteArrayOutputStream);
        // FIX: ToArray() instead of GetBuffer(). GetBuffer() exposes the stream's
        // entire internal buffer, whose Length/capacity can exceed the written
        // JPEG data, feeding trailing garbage bytes into DecodeByteArray.
        byte[] jpegArray = byteArrayOutputStream.ToArray();
        Bitmap bitmap = BitmapFactory.DecodeByteArray(jpegArray, 0, jpegArray.Length);

        Matrix matrix = new Matrix();
        matrix.PostRotate(rotationAngle);
        // Scale back to the metadata size, then apply the rotation.
        Bitmap scaledBitmap = Bitmap.CreateScaledBitmap(bitmap, width, height, true);
        Bitmap rotatedBitmap = Bitmap.CreateBitmap(
            scaledBitmap, 0, 0, scaledBitmap.Width, scaledBitmap.Height, matrix, true);
        return this.Crop(rotatedBitmap, face);
    }
}