public void OnImageAvailable(ImageReader reader)
{
    // Copies the first plane's bytes out of the newest available image, saves
    // them, and completes the pending TaskCompletionSource with the raw data.
    Image image = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image == null)
        {
            // Fix: AcquireLatestImage returns null when no frame is queued.
            // Previously this threw a NullReferenceException that was routed
            // into the TCS; an idle callback should simply be skipped.
            return;
        }

        ByteBuffer buffer = image.GetPlanes()[0].Buffer;
        byte[] bytes = new byte[buffer.Capacity()];
        buffer.Get(bytes);
        Save(bytes);
        this.tcs.TrySetResult(bytes);
    }
    catch (Java.Lang.Exception ex)
    {
        // Java-side failures (e.g. IllegalStateException) are forwarded to awaiters.
        this.tcs.TrySetException(ex);
    }
    catch (System.Exception ex)
    {
        this.tcs.TrySetException(ex);
    }
    finally
    {
        // The image must always be closed so the ImageReader can reuse the buffer slot.
        if (image != null)
        {
            image.Close();
        }
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Copies the first plane's bytes out of the newest available image and
    // hands them to Save; I/O failures are logged at Info level.
    Image image = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image == null)
        {
            // Fix: a null frame previously caused a NullReferenceException that
            // neither the FileNotFoundException nor the IOException handler
            // would catch, crashing the camera callback thread.
            return;
        }
        ByteBuffer buffer = image.GetPlanes()[0].Buffer;
        byte[] bytes = new byte[buffer.Capacity()];
        buffer.Get(bytes);
        Save(bytes);
    }
    catch (FileNotFoundException ex)
    {
        Log.WriteLine(LogPriority.Info, "Camera capture session", ex.StackTrace);
    }
    catch (IOException ex)
    {
        Log.WriteLine(LogPriority.Info, "Camera capture session", ex.StackTrace);
    }
    finally
    {
        // Always close so the ImageReader can reuse the buffer slot.
        if (image != null)
        {
            image.Close();
        }
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Repeated-capture loop: grab a frame, hand its JPEG bytes to the
    // CameraFeed element for processing, then schedule the next capture on the
    // UI thread after a short delay — until the renderer's token is cancelled.
    if (_renderer.CancellationToken.IsCancellationRequested)
    {
        return;
    }
    // get the byte array data from the first plane
    // of the image. This is sufficient for a JPEG
    // image
    Image image = reader.AcquireLatestImage();
    if (image != null)
    {
        Image.Plane[] planes = image.GetPlanes();
        ByteBuffer buffer = planes[0].Buffer;
        byte[] bytes = new byte[buffer.Capacity()];
        buffer.Get(bytes);
        // close the image so we can handle another image later
        image.Close();
        (_renderer.Element as CameraFeed)?.ProcessPhoto(bytes);
        // keeps capturing images until the annotations have
        // been processed into ssid and password successfully
        _renderer.CurrentContext.RunOnUiThread(async() =>
        {
            try
            {
                // Throttle the loop; cancellation aborts the pending delay.
                await Task.Delay(750, _renderer.CancellationToken);
            }
            catch (TaskCanceledException)
            {
                return;
            }
            _renderer.CaptureImage();
        });
    }
}
public IPattern AcquirePattern()
{
    // Atomically consume the "new image available" flag set by the reader callback.
    bool shouldRefresh;
    lock (_syncLock)
    {
        shouldRefresh = _newImgAvailable;
        _newImgAvailable = false;
    }

    if (shouldRefresh)
    {
        var acquired = _imageReader.AcquireLatestImage();
        if (acquired != null)
        {
            try
            {
                // Replace the cached pattern with one built from the fresh frame.
                _lastestPattern?.Dispose();
                _lastestPattern = MatFromImage(acquired);
            }
            finally
            {
                // Close is required for ImageReader. Dispose doesn't work.
                acquired.Close();
            }
        }
    }

    // Stale pattern is returned when no newer frame was available.
    return _lastestPattern;
}
public void OnImageAvailable(ImageReader reader)
{
    // Copies the first plane's bytes out of the newest available image and
    // saves them; any failure is reported through the tracer.
    Image image = null;
    try
    {
        this.tracer.Debug("OnImageAvailable: ImageReader.AcquireLatestImage");
        image = reader.AcquireLatestImage();
        if (image == null)
        {
            // Fix: AcquireLatestImage returns null when no frame is queued;
            // previously this produced a NullReferenceException that was
            // caught and logged as an error on every idle callback.
            return;
        }
        ByteBuffer buffer = image.GetPlanes()[0].Buffer;
        byte[] bytes = new byte[buffer.Capacity()];
        buffer.Get(bytes);
        this.Save(bytes);
    }
    catch (Exception ex)
    {
        this.tracer.Exception(ex, "OnImageAvailable failed with exception.");
    }
    finally
    {
        // Always close so the ImageReader can reuse the buffer slot.
        if (image != null)
        {
            image.Close();
        }
    }
}
void ImageReader.IOnImageAvailableListener.OnImageAvailable(ImageReader imageReader)
{
    // Writes the first plane of the newest available image to _imageFile.
    Image latestImage = null;
    try
    {
        latestImage = imageReader.AcquireLatestImage();
        if (latestImage == null)
        {
            // Fix: AcquireLatestImage may return null when no frame is queued;
            // previously this threw an unhandled NullReferenceException.
            return;
        }
        Image.Plane[] imagePlanes = latestImage.GetPlanes();
        ByteBuffer byteBuffer = imagePlanes[0].Buffer;
        byte[] imageBytes = new byte[byteBuffer.Capacity()];
        byteBuffer.Get(imageBytes);

        OutputStream outputStream = null;
        try
        {
            outputStream = new FileOutputStream(_imageFile);
            outputStream.Write(imageBytes);
        }
        finally
        {
            // Close the stream even if the write fails.
            outputStream?.Close();
        }
    }
    finally
    {
        // Always close so the ImageReader can reuse the buffer slot.
        latestImage?.Close();
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Drains the first plane of the newest frame; the downstream event is
    // currently disabled (see the commented-out invoke below).
    Image acquired = null;
    try
    {
        acquired = reader.AcquireLatestImage();
        var planeBuffer = acquired.GetPlanes()[0].Buffer;
        var payload = new byte[planeBuffer.Capacity()];
        planeBuffer.Get(payload);
        //diremark dl
        //Photo?.Invoke(this, imageData);
        //OnImageAvailable-> call processImage
    }
    catch (Exception)
    {
        //do nothing
    }
    finally
    {
        acquired?.Close();
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Copies the newest frame's JPEG bytes and posts an ImageSaver to the
    // background handler; frames arriving mid-capture are dropped.
    var image = reader.AcquireLatestImage();
    if (image == null)
    {
        // Fix: AcquireLatestImage may return null; previously this was only
        // handled on the "already capturing" path, and the main path threw
        // a NullReferenceException at GetPlanes().
        return;
    }
    if (Owner.capturingImage)
    {
        image.Close();
        return;
    }
    Owner.capturingImage = true;
    ByteBuffer buffer = image.GetPlanes()[0].Buffer;
    byte[] bytes = new byte[buffer.Remaining()];
    buffer.Get(bytes);
    // Close before posting so the reader slot is freed immediately.
    image.Close();
    if (Owner.mBackgroundHandler != null)
    {
        Owner.mBackgroundHandler.Post(new ImageSaver(bytes, File, scanModule) { Owner = Owner });
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Copies the first plane's bytes out of the newest available image and
    // raises the Photo event with the raw data.
    Image image = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image == null)
        {
            // Fix: AcquireLatestImage returns null when no frame is queued;
            // previously this produced a NullReferenceException that was
            // caught and written to debug output on every idle callback.
            return;
        }
        ByteBuffer buffer = image.GetPlanes()[0].Buffer;
        byte[] imageData = new byte[buffer.Capacity()];
        buffer.Get(imageData);
        Photo?.Invoke(this, imageData);
    }
    catch (Exception ex)
    {
        Debug.WriteLine($" [ImageAvailableListener] OnImageAvailable {ex.Message} {ex.StackTrace}");
    }
    finally
    {
        // Always close so the ImageReader can reuse the buffer slot.
        if (image != null)
        {
            image.Close();
        }
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Rotates the captured JPEG, inserts it into the device gallery and raises
    // the Photo event. All failures are deliberately swallowed.
    Image image = null;
    try
    {
        image = reader.AcquireLatestImage();
        var buffer = image.GetPlanes()[0].Buffer;
        var imageData = new byte[buffer.Capacity()];
        buffer.Get(imageData);
        var imgBitmap = rotateImage(imageData, _orientation);
        MediaStore.Images.Media.InsertImage(CrossCurrentActivity.Current.Activity.ContentResolver, imgBitmap, "Test", "DuyQK");
        MemoryStream stream = new MemoryStream();
        // NOTE(review): quality argument is 0 — for PNG the quality value is
        // ignored by Android, but confirm this is intentional.
        imgBitmap.Compress(Bitmap.CompressFormat.Png, 0, stream);
        byte[] bytepData = stream.ToArray();
        // NOTE(review): bytepData (the rotated PNG) is computed but never used;
        // the event receives the original un-rotated JPEG bytes instead —
        // confirm which payload subscribers expect.
        Photo?.Invoke(this, imageData);
    }
    catch (Exception)
    {
        // ignored
    }
    finally
    {
        image?.Close();
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Screen-capture frame handler: wraps the raw RGBA plane in a Bitmap,
    // JPEG-compresses it and streams it over screenSock as a "LIVESCREEN"
    // packet; on error it reports "ERRORLIVESCREEN" and stops the projection.
    Android.Media.Image image = null;
    Android.Graphics.Bitmap bitmap = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image != null)
        {
            Image.Plane[] planes = image.GetPlanes();
            ByteBuffer buffer = planes[0].Buffer;
            int offset = 0;  // NOTE(review): unused local
            int pixelStride = planes[0].PixelStride;
            int rowStride = planes[0].RowStride;
            // Rows may be padded beyond the visible width; widen the bitmap
            // accordingly so CopyPixelsFromBuffer consumes whole rows.
            int rowPadding = rowStride - pixelStride * ForegroundService.mWidth;
            // create bitmap
            bitmap = Android.Graphics.Bitmap.CreateBitmap(ForegroundService.mWidth + rowPadding / pixelStride, ForegroundService.mHeight, Android.Graphics.Bitmap.Config.Argb8888);
            bitmap.CopyPixelsFromBuffer(buffer);
            image.Close();
            // NOTE(review): image is closed here AND again in finally (the
            // reference stays non-null) — confirm the double Close is harmless.
            using (System.IO.MemoryStream fos = new System.IO.MemoryStream())
            {
                bitmap.Compress(Android.Graphics.Bitmap.CompressFormat.Jpeg, kalite, fos);
                byte[] dataPacker = ForegroundService._globalService.MyDataPacker("LIVESCREEN", StringCompressor.Compress(fos.ToArray()), ID);
                try
                {
                    if (screenSock != null)
                    {
                        screenSock.Send(dataPacker, 0, dataPacker.Length, SocketFlags.None);
                        // NOTE(review): blocking Wait on a 1 ms delay — presumably
                        // a crude send throttle; confirm intent.
                        System.Threading.Tasks.Task.Delay(1).Wait();
                    }
                }
                catch (Exception)
                {
                    // Send failures for a single frame are dropped silently.
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort error report to the control socket, then tear down.
        try
        {
            byte[] dataPacker = ForegroundService._globalService.MyDataPacker("ERRORLIVESCREEN", System.Text.Encoding.UTF8.GetBytes(ex.Message));
            ForegroundService.Soketimiz.BeginSend(dataPacker, 0, dataPacker.Length, SocketFlags.None, null, null);
        }
        catch (Exception)
        {
        }
        ForegroundService._globalService.stopProjection();
    }
    finally
    {
        if (bitmap != null)
        {
            bitmap.Recycle();
        }
        if (image != null)
        {
            image.Close();
        }
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Converts a YUV_420_888 frame to an RGB bitmap via RenderScript
    // intrinsics and raises OnImageProcessed. Bitmaps come from a small
    // reusable ring buffer to avoid per-frame allocation.
    Image image = reader.AcquireLatestImage();
    if (image == null)
    {
        return;
    }
    // Total byte count across all planes; _data is reused while the size is stable.
    Image.Plane[] planes = image.GetPlanes();
    int totalLength = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        totalLength += buffer.Remaining();
    }
    if (_data == null || _data.Length != totalLength)
    {
        _data = new byte[totalLength];
    }
    // Copy the planes back-to-back into _data.
    // NOTE(review): this ignores row/pixel strides — assumes tightly packed
    // planes; confirm for the devices in use.
    int offset = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        int length = buffer.Remaining();
        buffer.Get(_data, offset, length);
        offset += length;
    }
    if (_yuv420Converter == null)
    {
        _yuv420Converter = new YUV420Converter(Android.App.Application.Context);
    }
    // (Re)allocate the ring-buffer slot if the frame size changed.
    if (_bitmapSrcBuffer[_bitmapBufferIdx] == null || image.Width != (_bitmapSrcBuffer[_bitmapBufferIdx].Width) || (image.Height != _bitmapSrcBuffer[_bitmapBufferIdx].Height))
    {
        _bitmapSrcBuffer[_bitmapBufferIdx] = Bitmap.CreateBitmap(image.Width, image.Height, Bitmap.Config.Argb8888);
    }
    Bitmap bmpSrc = _bitmapSrcBuffer[_bitmapBufferIdx];
    _yuv420Converter.YUV_420_888_toRGBIntrinsics(image.Width, image.Height, _data, bmpSrc);
    if (OnImageProcessed != null)
    {
        OnImageProcessed(reader, bmpSrc);
    }
    // Advance the ring so the consumer can still read the previous bitmap.
    _bitmapBufferIdx = (_bitmapBufferIdx + 1) % _bitmapSrcBuffer.Length;
    image.Close();
    //image.Dispose();
}
/// <summary>
/// Takes a single screenshot: converts the latest captured frame to a bitmap,
/// tears down the media projection and notifies any waiter with the result.
/// </summary>
/// <param name="reader">Reader holding the captured frame.</param>
public void OnImageAvailable(ImageReader reader)
{
    var frame = reader.AcquireLatestImage();
    if (frame == null)
    {
        return;
    }
    _bitmap = ToBitmap(frame);
    frame.Close();
    TearDownMediaProjection();
    volatileDispose?.setAndNotify(_bitmap);
}
public void OnImageAvailable(ImageReader reader)
{
    // Forwards the newest frame to subscribers; the subscriber takes ownership
    // of the Image and is responsible for closing it.
    var image = reader.AcquireLatestImage();
    if (image == null)
    {
        Debug.WriteLine("Image is null");
        return;
    }

    var handler = ImageAvailable;
    if (handler != null)
    {
        handler.Invoke(this, image);
    }
    else
    {
        // Fix: with no subscriber the image was never closed, which eventually
        // exhausts the ImageReader's maxImages buffers and stalls capture.
        image.Close();
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Screen-capture frame handler: wraps the raw RGBA plane in a Bitmap,
    // JPEG-compresses it and streams it to the remote socket as a compressed
    // "LIVESCREEN" packet. All failures are swallowed.
    Android.Media.Image image = null;
    Android.Graphics.Bitmap bitmap = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image != null)
        {
            Image.Plane[] planes = image.GetPlanes();
            ByteBuffer buffer = planes[0].Buffer;
            int offset = 0;  // NOTE(review): unused local
            int pixelStride = planes[0].PixelStride;
            int rowStride = planes[0].RowStride;
            // Rows may be padded beyond the visible width; widen the bitmap
            // accordingly so CopyPixelsFromBuffer consumes whole rows.
            int rowPadding = rowStride - pixelStride * MainActivity.mWidth;
            // create bitmap
            bitmap = Android.Graphics.Bitmap.CreateBitmap(MainActivity.mWidth + rowPadding / pixelStride, MainActivity.mHeight, Android.Graphics.Bitmap.Config.Argb8888);
            bitmap.CopyPixelsFromBuffer(buffer);
            image.Close();
            // NOTE(review): image is closed here AND again in finally (the
            // reference stays non-null) — confirm the double Close is harmless.
            using (System.IO.MemoryStream fos = new System.IO.MemoryStream())
            {
                bitmap.Compress(Android.Graphics.Bitmap.CompressFormat.Jpeg, kalite, fos);
                try
                {
                    byte[] dataPacker = ((MainActivity)MainActivity.global_activity).MyDataPacker("LIVESCREEN", StringCompressor.Compress(fos.ToArray()));
                    MainActivity.Soketimiz.BeginSend(dataPacker, 0, dataPacker.Length, SocketFlags.None, null, null);
                }
                catch (Exception)
                {
                    // Send failures for a single frame are dropped silently.
                }
            }
        }
    }
    catch (Exception)
    {
        // Best-effort: frame failures are dropped silently.
    }
    finally
    {
        if (bitmap != null)
        {
            bitmap.Recycle();
        }
        if (image != null)
        {
            image.Close();
        }
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Extracts the three YUV_420_888 planes into managed arrays, releases the
    // image, then hands the data plus stride metadata to HandleYUV.
    byte[] y = null;
    byte[] u = null;
    byte[] v = null;
    int yRowStride = 0, uvRowStride = 0, uvPixelStride = 0;
    int width = 0, height = 0;
    using (var image = reader.AcquireLatestImage())
    {
        try
        {
            if (image == null)
            {
                // No frame queued; nothing to process.
                return;
            }
            // Get the three image planes
            Image.Plane[] planes = image.GetPlanes();
            ByteBuffer buffer = planes[0].Buffer;
            y = new byte[buffer.Remaining()];
            buffer.Get(y);
            buffer = planes[1].Buffer;
            u = new byte[buffer.Remaining()];
            buffer.Get(u);
            buffer = planes[2].Buffer;
            v = new byte[buffer.Remaining()];
            buffer.Get(v);
            // get the relevant RowStrides and PixelStrides
            // (we know from documentation that PixelStride is 1 for y)
            yRowStride = planes[0].RowStride;
            uvRowStride = planes[1].RowStride;     // we know from documentation that RowStride is the same for u and v.
            uvPixelStride = planes[1].PixelStride; // we know from documentation that PixelStride is the same for u and v.
            width = image.Width;
            height = image.Height;
        }
        finally
        {
            // Close frees the reader slot immediately; the using-Dispose that
            // follows is then a no-op on the underlying buffer.
            image?.Close();
        }
    }
    // Processing happens outside the using block so the image buffer is not
    // held while the (potentially slow) conversion runs.
    HandleYUV(y, u, v, width, height, yRowStride, uvRowStride, uvPixelStride);
}
public void OnImageAvailable(ImageReader reader)
{
    // Processes one frame at a time: copies the first plane's bytes and passes
    // them to FixImage. Frames arriving mid-processing are dropped.
    if (_currentImage != null)
    {
        // A frame is already being processed; skip this one.
        return;
    }
    _currentImage = reader.AcquireLatestImage();
    if (_currentImage == null)
    {
        // Fix: AcquireLatestImage can return null when no frame is queued;
        // previously this threw a NullReferenceException.
        return;
    }
    try
    {
        ByteBuffer byteBuffer = _currentImage.GetPlanes()[0].Buffer;
        byte[] imageBytes = new byte[byteBuffer.Remaining()];
        byteBuffer.Get(imageBytes);
        FixImage(imageBytes);
    }
    finally
    {
        // Fix: always close the image and clear the re-entrancy guard, even if
        // FixImage throws — otherwise _currentImage stayed set forever and the
        // listener silently stopped accepting frames.
        _currentImage.Close();
        _currentImage = null;
    }
}
//private static int IMAGES_PRODUCED = 0;
public void OnImageAvailable(ImageReader reader)
{
    // Screen-capture frame handler: wraps the raw RGBA plane in a Bitmap,
    // JPEG-compresses it and sends it as a base64 "LIVESCREEN" message.
    // All failures are swallowed.
    Android.Media.Image image = null;
    Android.Graphics.Bitmap bitmap = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image != null)
        {
            Image.Plane[] planes = image.GetPlanes();
            ByteBuffer buffer = planes[0].Buffer;
            int offset = 0;  // NOTE(review): unused local
            int pixelStride = planes[0].PixelStride;
            int rowStride = planes[0].RowStride;
            // Rows may be padded beyond the visible width; widen the bitmap
            // accordingly so CopyPixelsFromBuffer consumes whole rows.
            int rowPadding = rowStride - pixelStride * MainActivity.mWidth;
            // create bitmap
            bitmap = Android.Graphics.Bitmap.CreateBitmap(MainActivity.mWidth + rowPadding / pixelStride, MainActivity.mHeight, Android.Graphics.Bitmap.Config.Argb8888);
            bitmap.CopyPixelsFromBuffer(buffer);
            image.Close();
            // NOTE(review): image is closed here AND again in finally (the
            // reference stays non-null) — confirm the double Close is harmless.
            using (System.IO.MemoryStream fos = new System.IO.MemoryStream())
            {
                bitmap.Compress(Android.Graphics.Bitmap.CompressFormat.Jpeg, kalite, fos);
                ((MainActivity)MainActivity.global_activity).soketimizeGonder("LIVESCREEN", $"[VERI]{Convert.ToBase64String(fos.ToArray())}[0x09]");
                //System.IO.File.WriteAllBytes(Android.OS.Environment.ExternalStorageDirectory.AbsolutePath + "/myscreen_" + IMAGES_PRODUCED + ".jpg", fos.ToArray());
                //IMAGES_PRODUCED++;
            }
        }
    }
    catch (Exception)
    {
        // Best-effort: frame failures are dropped silently.
    }
    finally
    {
        if (bitmap != null)
        {
            bitmap.Recycle();
        }
        if (image != null)
        {
            image.Close();
        }
    }
}
public IPattern TakeScreenshot()
{
    // Grab the newest frame; when none is available the previously cached
    // pattern is returned unchanged.
    var frame = _imageReader.AcquireLatestImage();
    if (frame == null)
    {
        return _lastestPattern;
    }

    try
    {
        // Replace the cached pattern with one built from the fresh frame.
        _lastestPattern?.Dispose();
        _lastestPattern = MatFromImage(frame);
    }
    finally
    {
        // Close is required for ImageReader. Dispose doesn't work.
        frame.Close();
    }

    return _lastestPattern;
}
public void OnImageAvailable(ImageReader reader)
{
    // Copies the first plane's bytes out of the newest available image and
    // raises ImageProcessingCompleted with the raw data.
    Image image = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image == null)
        {
            // Fix: AcquireLatestImage may return null when no frame is queued;
            // with no catch block here, the resulting NullReferenceException
            // previously escaped the camera callback and could crash the app.
            return;
        }
        ByteBuffer buffer = image.GetPlanes()[0].Buffer;
        byte[] bytes = new byte[buffer.Capacity()];
        buffer.Get(bytes);
        ImageProcessingCompleted?.Invoke(this, bytes);
    }
    finally
    {
        // Always close so the ImageReader can reuse the buffer slot.
        if (image != null)
        {
            image.Close();
        }
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Copies the first plane's bytes from the newest frame and raises the
    // Photo event; failures are deliberately swallowed.
    Image acquired = null;
    try
    {
        acquired = reader.AcquireLatestImage();
        var planeBuffer = acquired.GetPlanes()[0].Buffer;
        var payload = new byte[planeBuffer.Capacity()];
        planeBuffer.Get(payload);
        Photo?.Invoke(this, payload);
    }
    catch (Exception)
    {
        // ignored
    }
    finally
    {
        acquired?.Close();
    }
}
/// <summary>
/// Handles a newly available camera frame: copies the first plane's bytes and
/// raises the <c>Photo</c> event with the raw image data.
/// </summary>
/// <param name="reader">Reader that produced the frame.</param>
public void OnImageAvailable(ImageReader reader)
{
    Image image = null;
    try
    {
        image = reader.AcquireLatestImage();
        if (image == null)
        {
            // Fix: AcquireLatestImage may return null when no frame is queued;
            // previously this threw a (silently swallowed) NullReferenceException.
            return;
        }
        ByteBuffer buffer = image.GetPlanes()[0].Buffer;
        byte[] imageData = new byte[buffer.Capacity()];
        buffer.Get(imageData);
        Photo?.Invoke(this, imageData);
    }
    catch (Exception)
    {
        // Best-effort: frame delivery failures are intentionally swallowed.
        // (Removed the unused exception variable that triggered CS0168.)
    }
    finally
    {
        // Always close so the ImageReader can reuse the buffer slot.
        if (image != null)
        {
            image.Close();
        }
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Converts a YUV_420_888 frame to RGB via RenderScript intrinsics, then
    // rotates it 90° with OpenCV (transpose + flip) and raises OnImageProcessed
    // with the rotated Mat. Bitmaps come from a small reusable ring buffer.
    Image image = reader.AcquireLatestImage();
    if (image == null)
    {
        return;
    }
    // Total byte count across all planes; _data is reused while the size is stable.
    Image.Plane[] planes = image.GetPlanes();
    int totalLength = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        totalLength += buffer.Remaining();
    }
    if (_data == null || _data.Length != totalLength)
    {
        _data = new byte[totalLength];
    }
    // Copy the planes back-to-back into _data.
    // NOTE(review): this ignores row/pixel strides — assumes tightly packed
    // planes; confirm for the devices in use.
    int offset = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        int length = buffer.Remaining();
        buffer.Get(_data, offset, length);
        offset += length;
    }
    if (_yuv420Converter == null)
    {
        _yuv420Converter = new YUV420Converter(Android.App.Application.Context);
    }
    // (Re)allocate the ring-buffer slot if the frame size changed.
    if (_bitmapSrcBuffer[_bitmapBufferIdx] == null || image.Width != (_bitmapSrcBuffer[_bitmapBufferIdx].Width) || (image.Height != _bitmapSrcBuffer[_bitmapBufferIdx].Height))
    {
        _bitmapSrcBuffer[_bitmapBufferIdx] = Bitmap.CreateBitmap(image.Width, image.Height, Bitmap.Config.Argb8888);
    }
    Bitmap bmpSrc = _bitmapSrcBuffer[_bitmapBufferIdx];
    _yuv420Converter.YUV_420_888_toRGBIntrinsics(image.Width, image.Height, _data, bmpSrc);
    // Wrap the bitmap's pixel buffer in a Mat without copying.
    using (Mat m = new Mat(bmpSrc.Height, bmpSrc.Width, DepthType.Cv8U, 4, bmpSrc.LockPixels(), bmpSrc.Width * 4))
    {
        bmpSrc.UnlockPixels();
        CvInvoke.CvtColor(m, _bgrMat, ColorConversion.Bgra2Bgr);
        //Rotate 90 degree by transpose and flip
        CvInvoke.Transpose(_bgrMat, _rotatedMat);
        CvInvoke.Flip(_rotatedMat, _rotatedMat, FlipType.Horizontal);
        //apply a simple invert filter
        //CvInvoke.BitwiseNot(_rotatedMat, _rotatedMat);
    }
    if (OnImageProcessed != null)
    {
        OnImageProcessed(reader, _rotatedMat);
    }
    // Advance the ring so the consumer can still read the previous bitmap.
    _bitmapBufferIdx = (_bitmapBufferIdx + 1) % _bitmapSrcBuffer.Length;
    image.Close();
    //image.Dispose();
}
public void OnImageAvailable(ImageReader reader)
{
    // Decodes the newest frame into an Emgu CV Mat and forwards it to the
    // owner's preview pipeline; frames arriving while the owner is busy are dropped.
    //if (Owner.CLEAR_CANVAS)
    //    Owner.mBackgroundHandler.Post(new ImageSaver(reader.AcquireNextImage(), Owner.mFile));
    if (Owner.isBusy)
    {
        // (A commented-out file-saving variant previously lived here;
        // condensed for readability — see version history.)
        return;
    }
    //Android.Util.Log.Error("ImgAvailListener", "OnImageAvailable");
    Image image = reader.AcquireLatestImage();
    if (image != null)
    {
        // First plane is assumed to hold the full encoded frame.
        ByteBuffer rgb = image.GetPlanes()[0].Buffer;
        byte[] data = new byte[rgb.Remaining()];
        rgb.Get(data);
        // (Commented-out per-pixel grayscale conversion removed for readability.)
        // NOTE(review): Imdecode expects an encoded (e.g. JPEG) byte stream —
        // confirm the reader's configured format matches.
        Mat imgMat = new Mat(image.Height, image.Width, Emgu.CV.CvEnum.DepthType.Cv8U, 3);
        CvInvoke.Imdecode(data, Emgu.CV.CvEnum.ImreadModes.Unchanged, imgMat);
        Android.Util.Log.Error("ImgAvailListener", "\t\timage sent:" + image.Width + "," + image.Height);
        image.Close();
        Owner.PreviewFrame(imgMat);
    }
}
//public File File { get; private set; }
//public Camera2BasicFragment Owner { get; private set; }

/// <summary>
/// Hands the newest captured image to a background <c>ImageSaver</c> for
/// writing to <c>file</c>.
/// NOTE(review): AcquireLatestImage can return null when no frame is queued —
/// presumably ImageSaver tolerates a null image; confirm.
/// </summary>
/// <param name="reader">Reader that produced the frame.</param>
public void OnImageAvailable(ImageReader reader)
{
    owner.mBackgroundHandler.Post(new ImageSaver(reader.AcquireLatestImage(), file));
}
// When the picture is available, forward the newest frame to saveImage.
// NOTE(review): AcquireLatestImage may return null when no frame is queued —
// saveImage must tolerate that; confirm.
void ImageReader.IOnImageAvailableListener.OnImageAvailable(ImageReader reader)
{
    saveImage(reader.AcquireLatestImage());
}
public void OnImageAvailable(ImageReader reader)
{
    // Copies a YUV_420_888 frame into a reusable byte array, wraps it (zero
    // copy, via pinning) as a single-channel YUV Mat, converts/rotates it —
    // or applies the _gComputation G-API pipeline when one is configured —
    // then raises OnImageProcessed with the rotated Mat.
    Image image = reader.AcquireLatestImage();
    if (image == null)
    {
        return;
    }
    // Total byte count across all planes; _data is reused while the size is stable.
    Image.Plane[] planes = image.GetPlanes();
    int totalLength = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        totalLength += buffer.Remaining();
    }
    if (_data == null || _data.Length != totalLength)
    {
        _data = new byte[totalLength];
    }
    // Copy the planes back-to-back into _data.
    // NOTE(review): this ignores row/pixel strides — assumes tightly packed
    // planes; confirm for the devices in use.
    int offset = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        int length = buffer.Remaining();
        buffer.Get(_data, offset, length);
        offset += length;
    }
    // Pin the managed array so the native Mat can reference it without copying.
    GCHandle handle = GCHandle.Alloc(_data, GCHandleType.Pinned);
    using (Mat m = new Mat(
        new System.Drawing.Size(image.Width, image.Height + image.Height / 2),
        DepthType.Cv8U,
        1,
        handle.AddrOfPinnedObject(),
        image.Width))
    {
        if (_gComputation != null)
        {
            _gComputation.Apply(m, _rotatedMat);
        }
        else
        {
            CvInvoke.CvtColor(m, _bgrMat, ColorConversion.Yuv2RgbYv12);
            //Rotate 90 degree by transpose and flip
            CvInvoke.Transpose(_bgrMat, _rotatedMat);
            CvInvoke.Flip(_rotatedMat, _rotatedMat, FlipType.Horizontal);
        }
    }
    handle.Free();
    // (A large commented-out YUV420Converter/bitmap ring-buffer variant
    // previously lived here; condensed for readability — see version history.)
    if (OnImageProcessed != null)
    {
        OnImageProcessed(reader, _rotatedMat);
    }
    //_bitmapBufferIdx = (_bitmapBufferIdx + 1) % _bitmapSrcBuffer.Length;
    image.Close();
    //image.Dispose();
}
private void HandleImageCaptured(ImageReader imageReader)
{
    // Saves the captured JPEG to the public Pictures directory, unlocks focus
    // on the UI thread, and (on success) launches a viewer intent for the file.
    Java.IO.FileOutputStream fos = null;
    Java.IO.File imageFile = null;
    var photoSaved = false;
    try
    {
        var image = imageReader.AcquireLatestImage();
        // NOTE(review): image is not null-checked and is only closed on the
        // success path below — a null frame or an exception before Close()
        // leaks the reader buffer; confirm and harden.
        var buffer = image.GetPlanes()[0].Buffer;
        var data = new byte[buffer.Remaining()];
        buffer.Get(data);
        var bitmap = BitmapFactory.DecodeByteArray(data, 0, data.Length);
        // NOTE(review): computed but never used in this method.
        var widthGreaterThanHeight = bitmap.Width > bitmap.Height;
        image.Close();
        // Build a unique file path under Pictures/AndroidCamera2Demo/Photos.
        string imageFileName = Guid.NewGuid().ToString();
        var storageDir = Android.OS.Environment.GetExternalStoragePublicDirectory(Android.OS.Environment.DirectoryPictures);
        var storageFilePath = storageDir + Java.IO.File.Separator + "AndroidCamera2Demo" + Java.IO.File.Separator + "Photos";
        var folder = new Java.IO.File(storageFilePath);
        if (!folder.Exists())
        {
            folder.Mkdirs();
        }
        imageFile = new Java.IO.File(storageFilePath + Java.IO.File.Separator + imageFileName + ".jpg");
        if (imageFile.Exists())
        {
            imageFile.Delete();
        }
        if (imageFile.CreateNewFile())
        {
            fos = new Java.IO.FileOutputStream(imageFile);
            using (var stream = new MemoryStream())
            {
                if (bitmap.Compress(Bitmap.CompressFormat.Jpeg, 100, stream))
                {
                    //We set the data array to the rotated bitmap.
                    data = stream.ToArray();
                    fos.Write(data);
                }
                else
                {
                    //something went wrong, let's just save the bitmap without rotation.
                    fos.Write(data);
                }
                stream.Close();
                photoSaved = true;
            }
        }
    }
    catch (Exception)
    {
        // In a real application we would handle this gracefully, likely alerting the user to the error
    }
    finally
    {
        if (fos != null)
        {
            fos.Close();
        }
        // Re-arm the capture pipeline regardless of save success.
        RunOnUiThread(UnlockFocus);
    }
    // Request that Android display our image if we successfully saved it
    if (imageFile != null && photoSaved)
    {
        var intent = new Intent(Intent.ActionView);
        var imageUri = Android.Net.Uri.Parse("file://" + imageFile.AbsolutePath);
        intent.SetDataAndType(imageUri, "image/*");
        StartActivity(intent);
    }
}
public void OnImageAvailable(ImageReader reader)
{
    // Converts a YUV_420_888 frame to RGB via RenderScript intrinsics,
    // rotates it 90° and inverts it with OpenCV, then pushes the result to
    // the activity for display. Bitmaps come from a small reusable ring buffer.
    Image image = reader.AcquireLatestImage();
    if (image == null)
    {
        return;
    }
    // Total byte count across all planes; _data is reused while the size is stable.
    Image.Plane[] planes = image.GetPlanes();
    int totalLength = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        totalLength += buffer.Remaining();
    }
    if (_data == null || _data.Length != totalLength)
    {
        _data = new byte[totalLength];
    }
    // Copy the planes back-to-back into _data.
    // NOTE(review): this ignores row/pixel strides — assumes tightly packed
    // planes; confirm for the devices in use.
    int offset = 0;
    for (int i = 0; i < planes.Length; i++)
    {
        Java.Nio.ByteBuffer buffer = planes[i].Buffer;
        int length = buffer.Remaining();
        buffer.Get(_data, offset, length);
        offset += length;
    }
    if (_yuv420Converter == null)
    {
        _yuv420Converter = new YUV420Converter(_activity);
    }
    // (Re)allocate the ring-buffer slot if the frame size changed.
    if (_bitmapSrcBuffer[_bitmapBufferIdx] == null || image.Width != (_bitmapSrcBuffer[_bitmapBufferIdx].Width) || (image.Height != _bitmapSrcBuffer[_bitmapBufferIdx].Height))
    {
        _bitmapSrcBuffer[_bitmapBufferIdx] = Bitmap.CreateBitmap(image.Width, image.Height, Bitmap.Config.Argb8888);
    }
    Bitmap bmpSrc = _bitmapSrcBuffer[_bitmapBufferIdx];
    _yuv420Converter.YUV_420_888_toRGBIntrinsics(image.Width, image.Height, _data, bmpSrc);
    // Wrap the bitmap's pixel buffer in a Mat without copying.
    using (Mat m = new Mat(bmpSrc.Height, bmpSrc.Width, DepthType.Cv8U, 4, bmpSrc.LockPixels(), bmpSrc.Width * 4))
    {
        bmpSrc.UnlockPixels();
        CvInvoke.CvtColor(m, _bgrMat, ColorConversion.Bgra2Bgr);
        //Rotate 90 degree by transpose and flip
        CvInvoke.Transpose(_bgrMat, _rotatedMat);
        CvInvoke.Flip(_rotatedMat, _rotatedMat, FlipType.Horizontal);
        //apply a simple invert filter
        CvInvoke.BitwiseNot(_rotatedMat, _rotatedMat);
    }
    _activity.SetImage(_rotatedMat);
    // Advance the ring so the consumer can still read the previous bitmap.
    _bitmapBufferIdx = (_bitmapBufferIdx + 1) % _bitmapSrcBuffer.Length;
    // (Commented-out alternative conversion paths — pinned-handle Yuv420Sp2Bgr
    // and a stride-aware per-plane copy — condensed for readability; see
    // version history.)
    image.Close();
    //image.Dispose();
}